id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
241,600
|
bitlabstudio/django-unshorten
|
unshorten/rate_limit.py
|
SimpleRateLimit.get_history
|
def get_history(self):
    """Returns the history from cache or DB or a newly created one."""
    try:
        # Served from the per-instance cache when available
        return self._history
    except AttributeError:
        pass
    try:
        history = APICallDayHistory.objects.get(
            user=self.user, creation_date=now().date())
    except APICallDayHistory.DoesNotExist:
        # No record for today yet; start an unsaved one at zero calls
        history = APICallDayHistory(user=self.user)
        history.amount_api_calls = 0
    self._history = history
    return self._history
|
python
|
def get_history(self):
    """Returns the history from cache or DB or a newly created one."""
    try:
        # Served from the per-instance cache when available
        return self._history
    except AttributeError:
        pass
    try:
        history = APICallDayHistory.objects.get(
            user=self.user, creation_date=now().date())
    except APICallDayHistory.DoesNotExist:
        # No record for today yet; start an unsaved one at zero calls
        history = APICallDayHistory(user=self.user)
        history.amount_api_calls = 0
    self._history = history
    return self._history
|
[
"def",
"get_history",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'_history'",
")",
":",
"return",
"self",
".",
"_history",
"try",
":",
"self",
".",
"_history",
"=",
"APICallDayHistory",
".",
"objects",
".",
"get",
"(",
"user",
"=",
"self",
".",
"user",
",",
"creation_date",
"=",
"now",
"(",
")",
".",
"date",
"(",
")",
")",
"except",
"APICallDayHistory",
".",
"DoesNotExist",
":",
"self",
".",
"_history",
"=",
"APICallDayHistory",
"(",
"user",
"=",
"self",
".",
"user",
")",
"self",
".",
"_history",
".",
"amount_api_calls",
"=",
"0",
"return",
"self",
".",
"_history"
] |
Returns the history from cache or DB or a newly created one.
|
[
"Returns",
"the",
"history",
"from",
"cache",
"or",
"DB",
"or",
"a",
"newly",
"created",
"one",
"."
] |
6d184de908bb9df3aad5ac3fd9732d976afb6953
|
https://github.com/bitlabstudio/django-unshorten/blob/6d184de908bb9df3aad5ac3fd9732d976afb6953/unshorten/rate_limit.py#L14-L24
|
241,601
|
bitlabstudio/django-unshorten
|
unshorten/rate_limit.py
|
SimpleRateLimit.is_rate_limit_exceeded
|
def is_rate_limit_exceeded(self):
    """Returns ``True`` if the rate limit is exceeded, otherwise False.

    Compares today's logged API call count against the
    ``UNSHORTEN_DAILY_LIMIT`` setting.
    """
    history = self.get_history()
    # ``>=`` already evaluates to a bool; no if/else needed
    return history.amount_api_calls >= settings.UNSHORTEN_DAILY_LIMIT
|
python
|
def is_rate_limit_exceeded(self):
    """Returns ``True`` if the rate limit is exceeded, otherwise False.

    Compares today's logged API call count against the
    ``UNSHORTEN_DAILY_LIMIT`` setting.
    """
    history = self.get_history()
    # ``>=`` already evaluates to a bool; no if/else needed
    return history.amount_api_calls >= settings.UNSHORTEN_DAILY_LIMIT
|
[
"def",
"is_rate_limit_exceeded",
"(",
"self",
")",
":",
"history",
"=",
"self",
".",
"get_history",
"(",
")",
"if",
"history",
".",
"amount_api_calls",
">=",
"settings",
".",
"UNSHORTEN_DAILY_LIMIT",
":",
"return",
"True",
"return",
"False"
] |
Returns ``True`` if the rate limit is exceeded, otherwise False.
|
[
"Returns",
"True",
"if",
"the",
"rate",
"limit",
"is",
"exceeded",
"otherwise",
"False",
"."
] |
6d184de908bb9df3aad5ac3fd9732d976afb6953
|
https://github.com/bitlabstudio/django-unshorten/blob/6d184de908bb9df3aad5ac3fd9732d976afb6953/unshorten/rate_limit.py#L26-L31
|
241,602
|
bitlabstudio/django-unshorten
|
unshorten/rate_limit.py
|
SimpleRateLimit.log_api_call
|
def log_api_call(self):
    """Increases the amount of logged API calls for the user by 1.

    Returns the updated ``APICallDayHistory`` instance.
    """
    history = self.get_history()
    history.amount_api_calls += 1
    # BUG FIX: Django's ``Model.save()`` returns None, so the original
    # ``self._history = history.save()`` poisoned the cache — every
    # later ``get_history()`` call would return None.  Save first,
    # then cache the instance itself.
    history.save()
    self._history = history
    return history
|
python
|
def log_api_call(self):
    """Increases the amount of logged API calls for the user by 1.

    Returns the updated ``APICallDayHistory`` instance.
    """
    history = self.get_history()
    history.amount_api_calls += 1
    # BUG FIX: Django's ``Model.save()`` returns None, so the original
    # ``self._history = history.save()`` poisoned the cache — every
    # later ``get_history()`` call would return None.  Save first,
    # then cache the instance itself.
    history.save()
    self._history = history
    return history
|
[
"def",
"log_api_call",
"(",
"self",
")",
":",
"history",
"=",
"self",
".",
"get_history",
"(",
")",
"history",
".",
"amount_api_calls",
"+=",
"1",
"self",
".",
"_history",
"=",
"history",
".",
"save",
"(",
")",
"return",
"history"
] |
Increases the amount of logged API calls for the user by 1.
|
[
"Increases",
"the",
"amount",
"of",
"logged",
"API",
"calls",
"for",
"the",
"user",
"by",
"1",
"."
] |
6d184de908bb9df3aad5ac3fd9732d976afb6953
|
https://github.com/bitlabstudio/django-unshorten/blob/6d184de908bb9df3aad5ac3fd9732d976afb6953/unshorten/rate_limit.py#L33-L38
|
241,603
|
jalanb/pysyte
|
pysyte/lists.py
|
de_duplicate
|
def de_duplicate(items):
    """Remove any duplicate item, preserving order

    Works for unhashable items too (membership test, not a set).

    >>> de_duplicate([1, 2, 1, 2])
    [1, 2]
    """
    unique = []
    for candidate in items:
        if candidate in unique:
            continue
        unique.append(candidate)
    return unique
|
python
|
def de_duplicate(items):
    """Remove any duplicate item, preserving order

    Works for unhashable items too (membership test, not a set).

    >>> de_duplicate([1, 2, 1, 2])
    [1, 2]
    """
    unique = []
    for candidate in items:
        if candidate in unique:
            continue
        unique.append(candidate)
    return unique
|
[
"def",
"de_duplicate",
"(",
"items",
")",
":",
"result",
"=",
"[",
"]",
"for",
"item",
"in",
"items",
":",
"if",
"item",
"not",
"in",
"result",
":",
"result",
".",
"append",
"(",
"item",
")",
"return",
"result"
] |
Remove any duplicate item, preserving order
>>> de_duplicate([1, 2, 1, 2])
[1, 2]
|
[
"Remove",
"any",
"duplicate",
"item",
"preserving",
"order"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/lists.py#L4-L14
|
241,604
|
JNRowe/jnrbase
|
jnrbase/config.py
|
read_configs
|
def read_configs(__pkg: str, __name: str = 'config', *,
                 local: bool = True) -> ConfigParser:
    """Process configuration file stack.
    We export the time parsing functionality of ``jnrbase`` as custom
    converters for :class:`configparser.ConfigParser`:
    =================== ===========================================
    Method              Function
    =================== ===========================================
    ``.getdatetime()``  :func:`~jnrbase.iso_8601.parse_datetime`
    ``.gethumantime()`` :func:`~jnrbase.human_time.parse_timedelta`
    ``.gettimedelta()`` :func:`~jnrbase.iso_8601.parse_delta`
    =================== ===========================================
    Args:
        __pkg: Package name to use as base for config files
        __name: File name to search for within config directories
        local: Whether to include config files from current directory
    Returns:
        Parsed configuration files
    """
    configs = get_configs(__pkg, __name)
    if local:
        localrc = path.abspath('.{}rc'.format(__pkg))
        if path.exists(localrc):
            configs.append(localrc)
    cfg = ConfigParser(converters={
        'datetime': parse_datetime,
        'humandelta': parse_timedelta,
        'timedelta': parse_delta,
    })
    cfg.read(configs, 'utf-8')
    cfg.configs = configs
    # BUG FIX: default ``colour`` up front.  Previously, when the package
    # had a config section but no colour/color key, ``cfg.colour`` was
    # never set, leaving a dangling attribute for callers.
    cfg.colour = True
    if 'NO_COLOUR' in environ or 'NO_COLOR' in environ:
        cfg.colour = False
    elif __pkg in cfg:
        if 'colour' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('colour')
        # US spelling wins when both keys are present
        if 'color' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('color')
    return cfg
|
python
|
def read_configs(__pkg: str, __name: str = 'config', *,
                 local: bool = True) -> ConfigParser:
    """Process configuration file stack.
    We export the time parsing functionality of ``jnrbase`` as custom
    converters for :class:`configparser.ConfigParser`:
    =================== ===========================================
    Method              Function
    =================== ===========================================
    ``.getdatetime()``  :func:`~jnrbase.iso_8601.parse_datetime`
    ``.gethumantime()`` :func:`~jnrbase.human_time.parse_timedelta`
    ``.gettimedelta()`` :func:`~jnrbase.iso_8601.parse_delta`
    =================== ===========================================
    Args:
        __pkg: Package name to use as base for config files
        __name: File name to search for within config directories
        local: Whether to include config files from current directory
    Returns:
        Parsed configuration files
    """
    configs = get_configs(__pkg, __name)
    if local:
        localrc = path.abspath('.{}rc'.format(__pkg))
        if path.exists(localrc):
            configs.append(localrc)
    cfg = ConfigParser(converters={
        'datetime': parse_datetime,
        'humandelta': parse_timedelta,
        'timedelta': parse_delta,
    })
    cfg.read(configs, 'utf-8')
    cfg.configs = configs
    # BUG FIX: default ``colour`` up front.  Previously, when the package
    # had a config section but no colour/color key, ``cfg.colour`` was
    # never set, leaving a dangling attribute for callers.
    cfg.colour = True
    if 'NO_COLOUR' in environ or 'NO_COLOR' in environ:
        cfg.colour = False
    elif __pkg in cfg:
        if 'colour' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('colour')
        # US spelling wins when both keys are present
        if 'color' in cfg[__pkg]:
            cfg.colour = cfg[__pkg].getboolean('color')
    return cfg
|
[
"def",
"read_configs",
"(",
"__pkg",
":",
"str",
",",
"__name",
":",
"str",
"=",
"'config'",
",",
"*",
",",
"local",
":",
"bool",
"=",
"True",
")",
"->",
"ConfigParser",
":",
"configs",
"=",
"get_configs",
"(",
"__pkg",
",",
"__name",
")",
"if",
"local",
":",
"localrc",
"=",
"path",
".",
"abspath",
"(",
"'.{}rc'",
".",
"format",
"(",
"__pkg",
")",
")",
"if",
"path",
".",
"exists",
"(",
"localrc",
")",
":",
"configs",
".",
"append",
"(",
"localrc",
")",
"cfg",
"=",
"ConfigParser",
"(",
"converters",
"=",
"{",
"'datetime'",
":",
"parse_datetime",
",",
"'humandelta'",
":",
"parse_timedelta",
",",
"'timedelta'",
":",
"parse_delta",
",",
"}",
")",
"cfg",
".",
"read",
"(",
"configs",
",",
"'utf-8'",
")",
"cfg",
".",
"configs",
"=",
"configs",
"if",
"'NO_COLOUR'",
"in",
"environ",
"or",
"'NO_COLOR'",
"in",
"environ",
":",
"cfg",
".",
"colour",
"=",
"False",
"elif",
"__pkg",
"in",
"cfg",
":",
"if",
"'colour'",
"in",
"cfg",
"[",
"__pkg",
"]",
":",
"cfg",
".",
"colour",
"=",
"cfg",
"[",
"__pkg",
"]",
".",
"getboolean",
"(",
"'colour'",
")",
"if",
"'color'",
"in",
"cfg",
"[",
"__pkg",
"]",
":",
"cfg",
".",
"colour",
"=",
"cfg",
"[",
"__pkg",
"]",
".",
"getboolean",
"(",
"'color'",
")",
"else",
":",
"cfg",
".",
"colour",
"=",
"True",
"return",
"cfg"
] |
Process configuration file stack.
We export the time parsing functionality of ``jnrbase`` as custom
converters for :class:`configparser.ConfigParser`:
=================== ===========================================
Method Function
=================== ===========================================
``.getdatetime()`` :func:`~jnrbase.iso_8601.parse_datetime`
``.gethumantime()`` :func:`~jnrbase.human_time.parse_timedelta`
``.gettimedelta()`` :func:`~jnrbase.iso_8601.parse_delta`
=================== ===========================================
Args:
__pkg: Package name to use as base for config files
__name: File name to search for within config directories
local: Whether to include config files from current directory
Returns:
Parsed configuration files
|
[
"Process",
"configuration",
"file",
"stack",
"."
] |
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
|
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/config.py#L29-L75
|
241,605
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendril._start
|
def _start(self):
    """
    Starts the underlying send and receive threads.
    """
    # Semaphores start at zero so the workers block until released
    self._recv_lock = coros.Semaphore(0)
    self._send_lock = coros.Semaphore(0)
    # Spawn both worker greenlets
    self._recv_thread = gevent.spawn(self._recv)
    self._send_thread = gevent.spawn(self._send)
    # Arrange to be notified if either greenlet exits
    for thread in (self._recv_thread, self._send_thread):
        thread.link(self._thread_error)
|
python
|
def _start(self):
    """
    Starts the underlying send and receive threads.
    """
    # Semaphores start at zero so the workers block until released
    self._recv_lock = coros.Semaphore(0)
    self._send_lock = coros.Semaphore(0)
    # Spawn both worker greenlets
    self._recv_thread = gevent.spawn(self._recv)
    self._send_thread = gevent.spawn(self._send)
    # Arrange to be notified if either greenlet exits
    for thread in (self._recv_thread, self._send_thread):
        thread.link(self._thread_error)
|
[
"def",
"_start",
"(",
"self",
")",
":",
"# Initialize the locks",
"self",
".",
"_recv_lock",
"=",
"coros",
".",
"Semaphore",
"(",
"0",
")",
"self",
".",
"_send_lock",
"=",
"coros",
".",
"Semaphore",
"(",
"0",
")",
"# Boot the threads",
"self",
".",
"_recv_thread",
"=",
"gevent",
".",
"spawn",
"(",
"self",
".",
"_recv",
")",
"self",
".",
"_send_thread",
"=",
"gevent",
".",
"spawn",
"(",
"self",
".",
"_send",
")",
"# Link the threads such that we get notified if one or the",
"# other exits",
"self",
".",
"_recv_thread",
".",
"link",
"(",
"self",
".",
"_thread_error",
")",
"self",
".",
"_send_thread",
".",
"link",
"(",
"self",
".",
"_thread_error",
")"
] |
Starts the underlying send and receive threads.
|
[
"Starts",
"the",
"underlying",
"send",
"and",
"receive",
"threads",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L76-L92
|
241,606
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendril._recv
|
def _recv(self):
    """
    Implementation of the receive thread.  Waits for data to
    arrive on the socket, then passes the data through the defined
    receive framer and sends it on to the application.

    Runs forever until the peer closes the connection, at which
    point it tears down the send thread and the socket itself, then
    exits by raising ``GreenletExit`` so that ``_thread_error()``
    treats the exit as deliberate rather than erroneous.
    """
    # Outer loop: receive some data
    while True:
        # Release/yield/reacquire around the lock so wrap() can
        # suspend this thread at a safe point between recv() calls
        self._recv_lock.release()
        gevent.sleep() # Yield to another thread
        self._recv_lock.acquire()
        recv_buf = self._sock.recv(self.recv_bufsize)
        # If it's empty, the peer closed the other end
        if not recv_buf:
            # Manually kill the send thread; do this manually
            # instead of calling close() because close() will kill
            # us, and since close() would be running in our thread
            # context, it would never get around to killing the
            # send thread
            if self._send_thread:
                self._send_thread.kill()
                self._send_thread = None
            # Manually close the socket
            self._sock.close()
            self._sock = None
            # Make sure the manager knows we're closed
            super(TCPTendril, self).close()
            # Notify the application
            self.closed()
            # As our last step, commit seppuku; this will keep
            # _thread_error() from notifying the application of an
            # erroneous exit from the receive thread
            raise gevent.GreenletExit()
        # Process the received data
        self._recv_frameify(recv_buf)
|
python
|
def _recv(self):
    """
    Implementation of the receive thread.  Waits for data to
    arrive on the socket, then passes the data through the defined
    receive framer and sends it on to the application.

    Runs forever until the peer closes the connection, at which
    point it tears down the send thread and the socket itself, then
    exits by raising ``GreenletExit`` so that ``_thread_error()``
    treats the exit as deliberate rather than erroneous.
    """
    # Outer loop: receive some data
    while True:
        # Release/yield/reacquire around the lock so wrap() can
        # suspend this thread at a safe point between recv() calls
        self._recv_lock.release()
        gevent.sleep() # Yield to another thread
        self._recv_lock.acquire()
        recv_buf = self._sock.recv(self.recv_bufsize)
        # If it's empty, the peer closed the other end
        if not recv_buf:
            # Manually kill the send thread; do this manually
            # instead of calling close() because close() will kill
            # us, and since close() would be running in our thread
            # context, it would never get around to killing the
            # send thread
            if self._send_thread:
                self._send_thread.kill()
                self._send_thread = None
            # Manually close the socket
            self._sock.close()
            self._sock = None
            # Make sure the manager knows we're closed
            super(TCPTendril, self).close()
            # Notify the application
            self.closed()
            # As our last step, commit seppuku; this will keep
            # _thread_error() from notifying the application of an
            # erroneous exit from the receive thread
            raise gevent.GreenletExit()
        # Process the received data
        self._recv_frameify(recv_buf)
|
[
"def",
"_recv",
"(",
"self",
")",
":",
"# Outer loop: receive some data",
"while",
"True",
":",
"# Wait until we can go",
"self",
".",
"_recv_lock",
".",
"release",
"(",
")",
"gevent",
".",
"sleep",
"(",
")",
"# Yield to another thread",
"self",
".",
"_recv_lock",
".",
"acquire",
"(",
")",
"recv_buf",
"=",
"self",
".",
"_sock",
".",
"recv",
"(",
"self",
".",
"recv_bufsize",
")",
"# If it's empty, the peer closed the other end",
"if",
"not",
"recv_buf",
":",
"# Manually kill the send thread; do this manually",
"# instead of calling close() because close() will kill",
"# us, and since close() would be running in our thread",
"# context, it would never get around to killing the",
"# send thread",
"if",
"self",
".",
"_send_thread",
":",
"self",
".",
"_send_thread",
".",
"kill",
"(",
")",
"self",
".",
"_send_thread",
"=",
"None",
"# Manually close the socket",
"self",
".",
"_sock",
".",
"close",
"(",
")",
"self",
".",
"_sock",
"=",
"None",
"# Make sure the manager knows we're closed",
"super",
"(",
"TCPTendril",
",",
"self",
")",
".",
"close",
"(",
")",
"# Notify the application",
"self",
".",
"closed",
"(",
")",
"# As our last step, commit seppuku; this will keep",
"# _thread_error() from notifying the application of an",
"# erroneous exit from the receive thread",
"raise",
"gevent",
".",
"GreenletExit",
"(",
")",
"# Process the received data",
"self",
".",
"_recv_frameify",
"(",
"recv_buf",
")"
] |
Implementation of the receive thread. Waits for data to
arrive on the socket, then passes the data through the defined
receive framer and sends it on to the application.
|
[
"Implementation",
"of",
"the",
"receive",
"thread",
".",
"Waits",
"for",
"data",
"to",
"arrive",
"on",
"the",
"socket",
"then",
"passes",
"the",
"data",
"through",
"the",
"defined",
"receive",
"framer",
"and",
"sends",
"it",
"on",
"to",
"the",
"application",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L94-L137
|
241,607
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendril._thread_error
|
def _thread_error(self, thread):
"""
Handles the case that the send or receive thread exit or throw
an exception.
"""
# Avoid double-killing the thread
if thread == self._send_thread:
self._send_thread = None
if thread == self._recv_thread:
self._recv_thread = None
# Figure out why the thread exited
if thread.successful():
exception = socket.error('thread exited prematurely')
elif isinstance(thread.exception, gevent.GreenletExit):
# Thread was killed; don't do anything but close
self.close()
return
else:
exception = thread.exception
# Close the connection...
self.close()
# Notify the application what happened
self.closed(exception)
|
python
|
def _thread_error(self, thread):
"""
Handles the case that the send or receive thread exit or throw
an exception.
"""
# Avoid double-killing the thread
if thread == self._send_thread:
self._send_thread = None
if thread == self._recv_thread:
self._recv_thread = None
# Figure out why the thread exited
if thread.successful():
exception = socket.error('thread exited prematurely')
elif isinstance(thread.exception, gevent.GreenletExit):
# Thread was killed; don't do anything but close
self.close()
return
else:
exception = thread.exception
# Close the connection...
self.close()
# Notify the application what happened
self.closed(exception)
|
[
"def",
"_thread_error",
"(",
"self",
",",
"thread",
")",
":",
"# Avoid double-killing the thread",
"if",
"thread",
"==",
"self",
".",
"_send_thread",
":",
"self",
".",
"_send_thread",
"=",
"None",
"if",
"thread",
"==",
"self",
".",
"_recv_thread",
":",
"self",
".",
"_recv_thread",
"=",
"None",
"# Figure out why the thread exited",
"if",
"thread",
".",
"successful",
"(",
")",
":",
"exception",
"=",
"socket",
".",
"error",
"(",
"'thread exited prematurely'",
")",
"elif",
"isinstance",
"(",
"thread",
".",
"exception",
",",
"gevent",
".",
"GreenletExit",
")",
":",
"# Thread was killed; don't do anything but close",
"self",
".",
"close",
"(",
")",
"return",
"else",
":",
"exception",
"=",
"thread",
".",
"exception",
"# Close the connection...",
"self",
".",
"close",
"(",
")",
"# Notify the application what happened",
"self",
".",
"closed",
"(",
"exception",
")"
] |
Handles the case that the send or receive thread exit or throw
an exception.
|
[
"Handles",
"the",
"case",
"that",
"the",
"send",
"or",
"receive",
"thread",
"exit",
"or",
"throw",
"an",
"exception",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L165-L191
|
241,608
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendril.wrap
|
def wrap(self, wrapper):
    """
    Allows the underlying socket to be wrapped, as by an SSL
    connection.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must
                    return a valid proxy for the socket.socket
                    object, which will subsequently be used to
                    communicate on the connection.
    Note: Be extremely careful with calling this method after the
    TCP connection has been initiated.  The action of this method
    affects both sending and receiving streams simultaneously, and
    no attempt is made to deal with buffered data, other than
    ensuring that both the sending and receiving threads are at
    stopping points.
    """
    if self._recv_thread and self._send_thread:
        # Have to suspend the send/recv threads; acquiring the
        # semaphores parks each worker at its release/yield point
        self._recv_lock.acquire()
        self._send_lock.acquire()
    # Wrap the socket
    self._sock = wrapper(self._sock)
    # OK, restart the send/recv threads; re-check liveness in case
    # the wrapper changed the thread state
    if self._recv_thread and self._send_thread:
        # Release our locks (reverse of the acquire order above)
        self._send_lock.release()
        self._recv_lock.release()
|
python
|
def wrap(self, wrapper):
    """
    Allows the underlying socket to be wrapped, as by an SSL
    connection.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must
                    return a valid proxy for the socket.socket
                    object, which will subsequently be used to
                    communicate on the connection.
    Note: Be extremely careful with calling this method after the
    TCP connection has been initiated.  The action of this method
    affects both sending and receiving streams simultaneously, and
    no attempt is made to deal with buffered data, other than
    ensuring that both the sending and receiving threads are at
    stopping points.
    """
    if self._recv_thread and self._send_thread:
        # Have to suspend the send/recv threads; acquiring the
        # semaphores parks each worker at its release/yield point
        self._recv_lock.acquire()
        self._send_lock.acquire()
    # Wrap the socket
    self._sock = wrapper(self._sock)
    # OK, restart the send/recv threads; re-check liveness in case
    # the wrapper changed the thread state
    if self._recv_thread and self._send_thread:
        # Release our locks (reverse of the acquire order above)
        self._send_lock.release()
        self._recv_lock.release()
|
[
"def",
"wrap",
"(",
"self",
",",
"wrapper",
")",
":",
"if",
"self",
".",
"_recv_thread",
"and",
"self",
".",
"_send_thread",
":",
"# Have to suspend the send/recv threads",
"self",
".",
"_recv_lock",
".",
"acquire",
"(",
")",
"self",
".",
"_send_lock",
".",
"acquire",
"(",
")",
"# Wrap the socket",
"self",
".",
"_sock",
"=",
"wrapper",
"(",
"self",
".",
"_sock",
")",
"# OK, restart the send/recv threads",
"if",
"self",
".",
"_recv_thread",
"and",
"self",
".",
"_send_thread",
":",
"# Release our locks",
"self",
".",
"_send_lock",
".",
"release",
"(",
")",
"self",
".",
"_recv_lock",
".",
"release",
"(",
")"
] |
Allows the underlying socket to be wrapped, as by an SSL
connection.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
Note: Be extremely careful with calling this method after the
TCP connection has been initiated. The action of this method
affects both sending and receiving streams simultaneously, and
no attempt is made to deal with buffered data, other than
ensuring that both the sending and receiving threads are at
stopping points.
|
[
"Allows",
"the",
"underlying",
"socket",
"to",
"be",
"wrapped",
"as",
"by",
"an",
"SSL",
"connection",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L193-L224
|
241,609
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendril.close
|
def close(self):
    """
    Close the connection.  Kills the send and receive threads, as
    well as closing the underlying socket.
    """
    # Tear down the worker greenlets first so neither observes the
    # socket disappearing out from under it
    for attr in ('_recv_thread', '_send_thread'):
        thread = getattr(self, attr)
        if thread:
            thread.kill()
            setattr(self, attr, None)
    sock = self._sock
    if sock:
        sock.close()
        self._sock = None
    # Make sure to notify the manager we're closed
    super(TCPTendril, self).close()
|
python
|
def close(self):
    """
    Close the connection.  Kills the send and receive threads, as
    well as closing the underlying socket.
    """
    # Tear down the worker greenlets first so neither observes the
    # socket disappearing out from under it
    for attr in ('_recv_thread', '_send_thread'):
        thread = getattr(self, attr)
        if thread:
            thread.kill()
            setattr(self, attr, None)
    sock = self._sock
    if sock:
        sock.close()
        self._sock = None
    # Make sure to notify the manager we're closed
    super(TCPTendril, self).close()
|
[
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"_recv_thread",
":",
"self",
".",
"_recv_thread",
".",
"kill",
"(",
")",
"self",
".",
"_recv_thread",
"=",
"None",
"if",
"self",
".",
"_send_thread",
":",
"self",
".",
"_send_thread",
".",
"kill",
"(",
")",
"self",
".",
"_send_thread",
"=",
"None",
"if",
"self",
".",
"_sock",
":",
"self",
".",
"_sock",
".",
"close",
"(",
")",
"self",
".",
"_sock",
"=",
"None",
"# Make sure to notify the manager we're closed",
"super",
"(",
"TCPTendril",
",",
"self",
")",
".",
"close",
"(",
")"
] |
Close the connection. Kills the send and receive threads, as
well as closing the underlying socket.
|
[
"Close",
"the",
"connection",
".",
"Kills",
"the",
"send",
"and",
"receive",
"threads",
"as",
"well",
"as",
"closing",
"the",
"underlying",
"socket",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L242-L261
|
241,610
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendrilManager.connect
|
def connect(self, target, acceptor, wrapper=None):
    """
    Initiate a connection from the tendril manager's endpoint.
    Once the connection is completed, a TCPTendril object will be
    created and passed to the given acceptor.
    :param target: The target of the connection attempt.
    :param acceptor: A callable which will initialize the state of
                     the new TCPTendril object.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must
                    return a valid proxy for the socket.socket
                    object, which will subsequently be used to
                    communicate on the connection.
    For passing extra arguments to the acceptor or the wrapper,
    see the ``TendrilPartial`` class; for chaining together
    multiple wrappers, see the ``WrapperChain`` class.

    :returns: The started TCPTendril, or None if the acceptor
              raised a RejectConnection exception.
    """
    # Call some common sanity-checks
    super(TCPTendrilManager, self).connect(target, acceptor, wrapper)
    # Set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
    # NOTE(review): SocketCloser presumably closes sock on unexpected
    # exceptions and swallows RejectConnection — confirm in utils
    with utils.SocketCloser(sock, ignore=[application.RejectConnection]):
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Connect to our target
        sock.connect(target)
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Now, construct a Tendril
        tend = TCPTendril(self, sock)
        # Finally, set up the application
        tend.application = acceptor(tend)
        # OK, let's track the tendril
        self._track_tendril(tend)
        # Start the tendril
        tend._start()
        # Might as well return the tendril, too
        return tend
    # The acceptor raised a RejectConnection exception, apparently
    sock.close()
    return None
|
python
|
def connect(self, target, acceptor, wrapper=None):
    """
    Initiate a connection from the tendril manager's endpoint.
    Once the connection is completed, a TCPTendril object will be
    created and passed to the given acceptor.
    :param target: The target of the connection attempt.
    :param acceptor: A callable which will initialize the state of
                     the new TCPTendril object.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must
                    return a valid proxy for the socket.socket
                    object, which will subsequently be used to
                    communicate on the connection.
    For passing extra arguments to the acceptor or the wrapper,
    see the ``TendrilPartial`` class; for chaining together
    multiple wrappers, see the ``WrapperChain`` class.

    :returns: The started TCPTendril, or None if the acceptor
              raised a RejectConnection exception.
    """
    # Call some common sanity-checks
    super(TCPTendrilManager, self).connect(target, acceptor, wrapper)
    # Set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
    # NOTE(review): SocketCloser presumably closes sock on unexpected
    # exceptions and swallows RejectConnection — confirm in utils
    with utils.SocketCloser(sock, ignore=[application.RejectConnection]):
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Connect to our target
        sock.connect(target)
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Now, construct a Tendril
        tend = TCPTendril(self, sock)
        # Finally, set up the application
        tend.application = acceptor(tend)
        # OK, let's track the tendril
        self._track_tendril(tend)
        # Start the tendril
        tend._start()
        # Might as well return the tendril, too
        return tend
    # The acceptor raised a RejectConnection exception, apparently
    sock.close()
    return None
|
[
"def",
"connect",
"(",
"self",
",",
"target",
",",
"acceptor",
",",
"wrapper",
"=",
"None",
")",
":",
"# Call some common sanity-checks",
"super",
"(",
"TCPTendrilManager",
",",
"self",
")",
".",
"connect",
"(",
"target",
",",
"acceptor",
",",
"wrapper",
")",
"# Set up the socket",
"sock",
"=",
"socket",
".",
"socket",
"(",
"self",
".",
"addr_family",
",",
"socket",
".",
"SOCK_STREAM",
")",
"with",
"utils",
".",
"SocketCloser",
"(",
"sock",
",",
"ignore",
"=",
"[",
"application",
".",
"RejectConnection",
"]",
")",
":",
"# Bind to our endpoint",
"sock",
".",
"bind",
"(",
"self",
".",
"endpoint",
")",
"# Connect to our target",
"sock",
".",
"connect",
"(",
"target",
")",
"# Call any wrappers",
"if",
"wrapper",
":",
"sock",
"=",
"wrapper",
"(",
"sock",
")",
"# Now, construct a Tendril",
"tend",
"=",
"TCPTendril",
"(",
"self",
",",
"sock",
")",
"# Finally, set up the application",
"tend",
".",
"application",
"=",
"acceptor",
"(",
"tend",
")",
"# OK, let's track the tendril",
"self",
".",
"_track_tendril",
"(",
"tend",
")",
"# Start the tendril",
"tend",
".",
"_start",
"(",
")",
"# Might as well return the tendril, too",
"return",
"tend",
"# The acceptor raised a RejectConnection exception, apparently",
"sock",
".",
"close",
"(",
")",
"return",
"None"
] |
Initiate a connection from the tendril manager's endpoint.
Once the connection is completed, a TCPTendril object will be
created and passed to the given acceptor.
:param target: The target of the connection attempt.
:param acceptor: A callable which will initialize the state of
the new TCPTendril object.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
For passing extra arguments to the acceptor or the wrapper,
see the ``TendrilPartial`` class; for chaining together
multiple wrappers, see the ``WrapperChain`` class.
|
[
"Initiate",
"a",
"connection",
"from",
"the",
"tendril",
"manager",
"s",
"endpoint",
".",
"Once",
"the",
"connection",
"is",
"completed",
"a",
"TCPTendril",
"object",
"will",
"be",
"created",
"and",
"passed",
"to",
"the",
"given",
"acceptor",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L276-L330
|
241,611
|
klmitch/tendril
|
tendril/tcp.py
|
TCPTendrilManager.listener
|
def listener(self, acceptor, wrapper):
    """
    Listens for new connections to the manager's endpoint.  Once a
    new connection is received, a TCPTendril object is generated
    for it and it is passed to the acceptor, which must initialize
    the state of the connection.  If no acceptor is given, no new
    connections can be initialized.
    :param acceptor: If given, specifies a callable that will be
                     called with each newly received TCPTendril;
                     that callable is responsible for initial
                     acceptance of the connection and for setting
                     up the initial state of the connection.  If
                     not given, no new connections will be
                     accepted by the TCPTendrilManager.
    :param wrapper: A callable taking, as its first argument, a
                    socket.socket object.  The callable must
                    return a valid proxy for the socket.socket
                    object, which will subsequently be used to
                    communicate on the connection.

    This method never returns: it either sleeps forever (no
    acceptor) or runs the accept loop until killed.
    """
    # If we have no acceptor, there's nothing for us to do here
    if not acceptor:
        # Not listening on anything
        self.local_addr = None
        # Just sleep in a loop
        while True:
            gevent.sleep(600)
        return # Pragma: nocover
    # OK, set up the socket
    sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
    with utils.SocketCloser(sock):
        # Set up SO_REUSEADDR
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # Bind to our endpoint
        sock.bind(self.endpoint)
        # Get the assigned port number
        self.local_addr = sock.getsockname()
        # Call any wrappers
        if wrapper:
            sock = wrapper(sock)
        # Initiate listening
        sock.listen(self.backlog)
    # OK, now go into an accept loop with an error threshold of 10
    closer = utils.SocketCloser(sock, 10,
                                ignore=[application.RejectConnection])
    while True:
        with closer:
            cli, addr = sock.accept()
            # OK, the connection has been accepted; construct a
            # Tendril for it
            tend = TCPTendril(self, cli, addr)
            # Set up the application; closes ``cli`` if the
            # acceptor raises
            with utils.SocketCloser(cli):
                tend.application = acceptor(tend)
                # Make sure we track the new tendril, but only if
                # the acceptor doesn't throw any exceptions
                self._track_tendril(tend)
                # Start the tendril
                tend._start()
|
python
|
def listener(self, acceptor, wrapper):
"""
Listens for new connections to the manager's endpoint. Once a
new connection is received, a TCPTendril object is generated
for it and it is passed to the acceptor, which must initialize
the state of the connection. If no acceptor is given, no new
connections can be initialized.
:param acceptor: If given, specifies a callable that will be
called with each newly received TCPTendril;
that callable is responsible for initial
acceptance of the connection and for setting
up the initial state of the connection. If
not given, no new connections will be
accepted by the TCPTendrilManager.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
"""
# If we have no acceptor, there's nothing for us to do here
if not acceptor:
# Not listening on anything
self.local_addr = None
# Just sleep in a loop
while True:
gevent.sleep(600)
return # Pragma: nocover
# OK, set up the socket
sock = socket.socket(self.addr_family, socket.SOCK_STREAM)
with utils.SocketCloser(sock):
# Set up SO_REUSEADDR
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind to our endpoint
sock.bind(self.endpoint)
# Get the assigned port number
self.local_addr = sock.getsockname()
# Call any wrappers
if wrapper:
sock = wrapper(sock)
# Initiate listening
sock.listen(self.backlog)
# OK, now go into an accept loop with an error threshold of 10
closer = utils.SocketCloser(sock, 10,
ignore=[application.RejectConnection])
while True:
with closer:
cli, addr = sock.accept()
# OK, the connection has been accepted; construct a
# Tendril for it
tend = TCPTendril(self, cli, addr)
# Set up the application
with utils.SocketCloser(cli):
tend.application = acceptor(tend)
# Make sure we track the new tendril, but only if
# the acceptor doesn't throw any exceptions
self._track_tendril(tend)
# Start the tendril
tend._start()
|
[
"def",
"listener",
"(",
"self",
",",
"acceptor",
",",
"wrapper",
")",
":",
"# If we have no acceptor, there's nothing for us to do here",
"if",
"not",
"acceptor",
":",
"# Not listening on anything",
"self",
".",
"local_addr",
"=",
"None",
"# Just sleep in a loop",
"while",
"True",
":",
"gevent",
".",
"sleep",
"(",
"600",
")",
"return",
"# Pragma: nocover",
"# OK, set up the socket",
"sock",
"=",
"socket",
".",
"socket",
"(",
"self",
".",
"addr_family",
",",
"socket",
".",
"SOCK_STREAM",
")",
"with",
"utils",
".",
"SocketCloser",
"(",
"sock",
")",
":",
"# Set up SO_REUSEADDR",
"sock",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"# Bind to our endpoint",
"sock",
".",
"bind",
"(",
"self",
".",
"endpoint",
")",
"# Get the assigned port number",
"self",
".",
"local_addr",
"=",
"sock",
".",
"getsockname",
"(",
")",
"# Call any wrappers",
"if",
"wrapper",
":",
"sock",
"=",
"wrapper",
"(",
"sock",
")",
"# Initiate listening",
"sock",
".",
"listen",
"(",
"self",
".",
"backlog",
")",
"# OK, now go into an accept loop with an error threshold of 10",
"closer",
"=",
"utils",
".",
"SocketCloser",
"(",
"sock",
",",
"10",
",",
"ignore",
"=",
"[",
"application",
".",
"RejectConnection",
"]",
")",
"while",
"True",
":",
"with",
"closer",
":",
"cli",
",",
"addr",
"=",
"sock",
".",
"accept",
"(",
")",
"# OK, the connection has been accepted; construct a",
"# Tendril for it",
"tend",
"=",
"TCPTendril",
"(",
"self",
",",
"cli",
",",
"addr",
")",
"# Set up the application",
"with",
"utils",
".",
"SocketCloser",
"(",
"cli",
")",
":",
"tend",
".",
"application",
"=",
"acceptor",
"(",
"tend",
")",
"# Make sure we track the new tendril, but only if",
"# the acceptor doesn't throw any exceptions",
"self",
".",
"_track_tendril",
"(",
"tend",
")",
"# Start the tendril",
"tend",
".",
"_start",
"(",
")"
] |
Listens for new connections to the manager's endpoint. Once a
new connection is received, a TCPTendril object is generated
for it and it is passed to the acceptor, which must initialize
the state of the connection. If no acceptor is given, no new
connections can be initialized.
:param acceptor: If given, specifies a callable that will be
called with each newly received TCPTendril;
that callable is responsible for initial
acceptance of the connection and for setting
up the initial state of the connection. If
not given, no new connections will be
accepted by the TCPTendrilManager.
:param wrapper: A callable taking, as its first argument, a
socket.socket object. The callable must
return a valid proxy for the socket.socket
object, which will subsequently be used to
communicate on the connection.
|
[
"Listens",
"for",
"new",
"connections",
"to",
"the",
"manager",
"s",
"endpoint",
".",
"Once",
"a",
"new",
"connection",
"is",
"received",
"a",
"TCPTendril",
"object",
"is",
"generated",
"for",
"it",
"and",
"it",
"is",
"passed",
"to",
"the",
"acceptor",
"which",
"must",
"initialize",
"the",
"state",
"of",
"the",
"connection",
".",
"If",
"no",
"acceptor",
"is",
"given",
"no",
"new",
"connections",
"can",
"be",
"initialized",
"."
] |
207102c83e88f8f1fa7ba605ef0aab2ae9078b36
|
https://github.com/klmitch/tendril/blob/207102c83e88f8f1fa7ba605ef0aab2ae9078b36/tendril/tcp.py#L332-L404
|
241,612
|
noobermin/lspreader
|
lspreader/dotlsp.py
|
getdim
|
def getdim(lsp):
'''
Obtain the dimensionality of a .lsp file. This should work for all well
formatted .lsp files.
Parameters:
-----------
lsp : .lsp string
Returns a list of dimensions.
'''
dims= ['x','y', 'z'];
rxs = ['{}-cells *([0-9]+)'.format(x) for x in ['x','y','z']];
return [
x for x,rx in zip(dims,rxs)
if re.search(rx,lsp) and int(re.search(rx,lsp).group(1)) > 0 ];
|
python
|
def getdim(lsp):
'''
Obtain the dimensionality of a .lsp file. This should work for all well
formatted .lsp files.
Parameters:
-----------
lsp : .lsp string
Returns a list of dimensions.
'''
dims= ['x','y', 'z'];
rxs = ['{}-cells *([0-9]+)'.format(x) for x in ['x','y','z']];
return [
x for x,rx in zip(dims,rxs)
if re.search(rx,lsp) and int(re.search(rx,lsp).group(1)) > 0 ];
|
[
"def",
"getdim",
"(",
"lsp",
")",
":",
"dims",
"=",
"[",
"'x'",
",",
"'y'",
",",
"'z'",
"]",
"rxs",
"=",
"[",
"'{}-cells *([0-9]+)'",
".",
"format",
"(",
"x",
")",
"for",
"x",
"in",
"[",
"'x'",
",",
"'y'",
",",
"'z'",
"]",
"]",
"return",
"[",
"x",
"for",
"x",
",",
"rx",
"in",
"zip",
"(",
"dims",
",",
"rxs",
")",
"if",
"re",
".",
"search",
"(",
"rx",
",",
"lsp",
")",
"and",
"int",
"(",
"re",
".",
"search",
"(",
"rx",
",",
"lsp",
")",
".",
"group",
"(",
"1",
")",
")",
">",
"0",
"]"
] |
Obtain the dimensionality of a .lsp file. This should work for all well
formatted .lsp files.
Parameters:
-----------
lsp : .lsp string
Returns a list of dimensions.
|
[
"Obtain",
"the",
"dimensionality",
"of",
"a",
".",
"lsp",
"file",
".",
"This",
"should",
"work",
"for",
"all",
"well",
"formatted",
".",
"lsp",
"files",
"."
] |
903b9d6427513b07986ffacf76cbca54e18d8be6
|
https://github.com/noobermin/lspreader/blob/903b9d6427513b07986ffacf76cbca54e18d8be6/lspreader/dotlsp.py#L26-L41
|
241,613
|
noobermin/lspreader
|
lspreader/dotlsp.py
|
getpexts
|
def getpexts(lsp):
'''
Get information from pext planes. This might or might not work, use with
caution!
Parameters:
-----------
lsp : .lsp string
Returns a list of dicts with information for all pext planes
'''
lines=lsp.split('\n');
#unfortunately regex doesn't work here
lns,planens = zip(
*[ (i,int(re.search('^ *extract *([0-9]+)',line).group(1)))
for i,line in enumerate(lines)
if re.search('^ *extract *[0-9]+', line)]);
if len(lns) == 0: return [];
end = lns[-1];
for i,line in enumerate(lines[end+1:]):
if re.match(' *\[',line): break;
end += i;
lineranges = zip(lns,(lns+(end,))[1:]);
planes=dict()
for (i,end),plane in zip(lineranges,planens):
d=dict();
labels = [
'species',
'direction',
'position',];
datarx = [
'^ *species *([0-9]+)',
'^ *direction *([xXyYzZ])',
'^ *at *(.*)',];
convs = [
lambda s: int(s),
lambda i: i,
lambda s: np.array(
map(float,s.split(' '))),
];
for line in lines[i:end]:
for label,rx,conv in zip(labels,datarx,convs):
if re.match(rx,line):
d[label]=conv(re.match(rx,line).group(1));
pass
pass
planes[plane] = d;
return planes;
|
python
|
def getpexts(lsp):
'''
Get information from pext planes. This might or might not work, use with
caution!
Parameters:
-----------
lsp : .lsp string
Returns a list of dicts with information for all pext planes
'''
lines=lsp.split('\n');
#unfortunately regex doesn't work here
lns,planens = zip(
*[ (i,int(re.search('^ *extract *([0-9]+)',line).group(1)))
for i,line in enumerate(lines)
if re.search('^ *extract *[0-9]+', line)]);
if len(lns) == 0: return [];
end = lns[-1];
for i,line in enumerate(lines[end+1:]):
if re.match(' *\[',line): break;
end += i;
lineranges = zip(lns,(lns+(end,))[1:]);
planes=dict()
for (i,end),plane in zip(lineranges,planens):
d=dict();
labels = [
'species',
'direction',
'position',];
datarx = [
'^ *species *([0-9]+)',
'^ *direction *([xXyYzZ])',
'^ *at *(.*)',];
convs = [
lambda s: int(s),
lambda i: i,
lambda s: np.array(
map(float,s.split(' '))),
];
for line in lines[i:end]:
for label,rx,conv in zip(labels,datarx,convs):
if re.match(rx,line):
d[label]=conv(re.match(rx,line).group(1));
pass
pass
planes[plane] = d;
return planes;
|
[
"def",
"getpexts",
"(",
"lsp",
")",
":",
"lines",
"=",
"lsp",
".",
"split",
"(",
"'\\n'",
")",
"#unfortunately regex doesn't work here",
"lns",
",",
"planens",
"=",
"zip",
"(",
"*",
"[",
"(",
"i",
",",
"int",
"(",
"re",
".",
"search",
"(",
"'^ *extract *([0-9]+)'",
",",
"line",
")",
".",
"group",
"(",
"1",
")",
")",
")",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"lines",
")",
"if",
"re",
".",
"search",
"(",
"'^ *extract *[0-9]+'",
",",
"line",
")",
"]",
")",
"if",
"len",
"(",
"lns",
")",
"==",
"0",
":",
"return",
"[",
"]",
"end",
"=",
"lns",
"[",
"-",
"1",
"]",
"for",
"i",
",",
"line",
"in",
"enumerate",
"(",
"lines",
"[",
"end",
"+",
"1",
":",
"]",
")",
":",
"if",
"re",
".",
"match",
"(",
"' *\\['",
",",
"line",
")",
":",
"break",
"end",
"+=",
"i",
"lineranges",
"=",
"zip",
"(",
"lns",
",",
"(",
"lns",
"+",
"(",
"end",
",",
")",
")",
"[",
"1",
":",
"]",
")",
"planes",
"=",
"dict",
"(",
")",
"for",
"(",
"i",
",",
"end",
")",
",",
"plane",
"in",
"zip",
"(",
"lineranges",
",",
"planens",
")",
":",
"d",
"=",
"dict",
"(",
")",
"labels",
"=",
"[",
"'species'",
",",
"'direction'",
",",
"'position'",
",",
"]",
"datarx",
"=",
"[",
"'^ *species *([0-9]+)'",
",",
"'^ *direction *([xXyYzZ])'",
",",
"'^ *at *(.*)'",
",",
"]",
"convs",
"=",
"[",
"lambda",
"s",
":",
"int",
"(",
"s",
")",
",",
"lambda",
"i",
":",
"i",
",",
"lambda",
"s",
":",
"np",
".",
"array",
"(",
"map",
"(",
"float",
",",
"s",
".",
"split",
"(",
"' '",
")",
")",
")",
",",
"]",
"for",
"line",
"in",
"lines",
"[",
"i",
":",
"end",
"]",
":",
"for",
"label",
",",
"rx",
",",
"conv",
"in",
"zip",
"(",
"labels",
",",
"datarx",
",",
"convs",
")",
":",
"if",
"re",
".",
"match",
"(",
"rx",
",",
"line",
")",
":",
"d",
"[",
"label",
"]",
"=",
"conv",
"(",
"re",
".",
"match",
"(",
"rx",
",",
"line",
")",
".",
"group",
"(",
"1",
")",
")",
"pass",
"pass",
"planes",
"[",
"plane",
"]",
"=",
"d",
"return",
"planes"
] |
Get information from pext planes. This might or might not work, use with
caution!
Parameters:
-----------
lsp : .lsp string
Returns a list of dicts with information for all pext planes
|
[
"Get",
"information",
"from",
"pext",
"planes",
".",
"This",
"might",
"or",
"might",
"not",
"work",
"use",
"with",
"caution!"
] |
903b9d6427513b07986ffacf76cbca54e18d8be6
|
https://github.com/noobermin/lspreader/blob/903b9d6427513b07986ffacf76cbca54e18d8be6/lspreader/dotlsp.py#L43-L92
|
241,614
|
shaypal5/comath
|
comath/array/array.py
|
percentile
|
def percentile(sorted_list, percent, key=lambda x: x):
"""Find the percentile of a sorted list of values.
Arguments
---------
sorted_list : list
A sorted (ascending) list of values.
percent : float
A float value from 0.0 to 1.0.
key : function, optional
An optional function to compute a value from each element of N.
Returns
-------
float
The desired percentile of the value list.
Examples
--------
>>> sorted_list = [4,6,8,9,11]
>>> percentile(sorted_list, 0.4)
7.0
>>> percentile(sorted_list, 0.44)
8.0
>>> percentile(sorted_list, 0.6)
8.5
>>> percentile(sorted_list, 0.99)
11.0
>>> percentile(sorted_list, 1)
11.0
>>> percentile(sorted_list, 0)
4.0
"""
if not sorted_list:
return None
if percent == 1:
return float(sorted_list[-1])
if percent == 0:
return float(sorted_list[0])
n = len(sorted_list)
i = percent * n
if ceil(i) == i:
i = int(i)
return (sorted_list[i-1] + sorted_list[i]) / 2
return float(sorted_list[ceil(i)-1])
|
python
|
def percentile(sorted_list, percent, key=lambda x: x):
"""Find the percentile of a sorted list of values.
Arguments
---------
sorted_list : list
A sorted (ascending) list of values.
percent : float
A float value from 0.0 to 1.0.
key : function, optional
An optional function to compute a value from each element of N.
Returns
-------
float
The desired percentile of the value list.
Examples
--------
>>> sorted_list = [4,6,8,9,11]
>>> percentile(sorted_list, 0.4)
7.0
>>> percentile(sorted_list, 0.44)
8.0
>>> percentile(sorted_list, 0.6)
8.5
>>> percentile(sorted_list, 0.99)
11.0
>>> percentile(sorted_list, 1)
11.0
>>> percentile(sorted_list, 0)
4.0
"""
if not sorted_list:
return None
if percent == 1:
return float(sorted_list[-1])
if percent == 0:
return float(sorted_list[0])
n = len(sorted_list)
i = percent * n
if ceil(i) == i:
i = int(i)
return (sorted_list[i-1] + sorted_list[i]) / 2
return float(sorted_list[ceil(i)-1])
|
[
"def",
"percentile",
"(",
"sorted_list",
",",
"percent",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
")",
":",
"if",
"not",
"sorted_list",
":",
"return",
"None",
"if",
"percent",
"==",
"1",
":",
"return",
"float",
"(",
"sorted_list",
"[",
"-",
"1",
"]",
")",
"if",
"percent",
"==",
"0",
":",
"return",
"float",
"(",
"sorted_list",
"[",
"0",
"]",
")",
"n",
"=",
"len",
"(",
"sorted_list",
")",
"i",
"=",
"percent",
"*",
"n",
"if",
"ceil",
"(",
"i",
")",
"==",
"i",
":",
"i",
"=",
"int",
"(",
"i",
")",
"return",
"(",
"sorted_list",
"[",
"i",
"-",
"1",
"]",
"+",
"sorted_list",
"[",
"i",
"]",
")",
"/",
"2",
"return",
"float",
"(",
"sorted_list",
"[",
"ceil",
"(",
"i",
")",
"-",
"1",
"]",
")"
] |
Find the percentile of a sorted list of values.
Arguments
---------
sorted_list : list
A sorted (ascending) list of values.
percent : float
A float value from 0.0 to 1.0.
key : function, optional
An optional function to compute a value from each element of N.
Returns
-------
float
The desired percentile of the value list.
Examples
--------
>>> sorted_list = [4,6,8,9,11]
>>> percentile(sorted_list, 0.4)
7.0
>>> percentile(sorted_list, 0.44)
8.0
>>> percentile(sorted_list, 0.6)
8.5
>>> percentile(sorted_list, 0.99)
11.0
>>> percentile(sorted_list, 1)
11.0
>>> percentile(sorted_list, 0)
4.0
|
[
"Find",
"the",
"percentile",
"of",
"a",
"sorted",
"list",
"of",
"values",
"."
] |
1333e3b96242a5bad9d3e699ffd58a1597fdc89f
|
https://github.com/shaypal5/comath/blob/1333e3b96242a5bad9d3e699ffd58a1597fdc89f/comath/array/array.py#L24-L68
|
241,615
|
Apitax/Apitax
|
apitax/api/controllers/api_controller.py
|
get_driver_api_catalog
|
def get_driver_api_catalog(driver): # noqa: E501
"""Retrieve the api catalog
Retrieve the api catalog # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add(driver.getApiEndpointCatalog())
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_api_catalog(driver): # noqa: E501
"""Retrieve the api catalog
Retrieve the api catalog # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add(driver.getApiEndpointCatalog())
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_api_catalog",
"(",
"driver",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'developer'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"driver",
":",
"Driver",
"=",
"LoadedDrivers",
".",
"getDriver",
"(",
"driver",
")",
"response",
".",
"body",
".",
"add",
"(",
"driver",
".",
"getApiEndpointCatalog",
"(",
")",
")",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the api catalog
Retrieve the api catalog # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
|
[
"Retrieve",
"the",
"api",
"catalog"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/api_controller.py#L17-L36
|
241,616
|
Apitax/Apitax
|
apitax/api/controllers/api_controller.py
|
get_driver_api_status
|
def get_driver_api_status(driver): # noqa: E501
"""Retrieve the status of an api backing a driver
Retrieve the status of an api backing a driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({"format": driver.getApiFormat()})
response.body.add({"description": driver.getApiDescription()})
response.body.add({"status": driver.getApiStatus()})
response.body.add({"auth-type": driver.getApiAuthType()})
endpoints = {}
endpoints['base'] = driver.getApiBaseEndpoint()
endpoints['catalog'] = driver.getApiCatalogEndpoint()
endpoints['auth'] = driver.getApiAuthEndpoint()
response.body.add({'endpoints': endpoints})
options = {}
options['authenticatable'] = driver.isApiAuthenticated()
options['authentication-separate'] = driver.isApiAuthenticationSeparateRequest()
options['cataloggable'] = driver.isApiCataloggable()
options['tokenable'] = driver.isApiTokenable()
response.body.add({'options': options})
return Response(status=200, body=response.getResponseBody())
|
python
|
def get_driver_api_status(driver): # noqa: E501
"""Retrieve the status of an api backing a driver
Retrieve the status of an api backing a driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
"""
response = errorIfUnauthorized(role='developer')
if response:
return response
else:
response = ApitaxResponse()
driver: Driver = LoadedDrivers.getDriver(driver)
response.body.add({"format": driver.getApiFormat()})
response.body.add({"description": driver.getApiDescription()})
response.body.add({"status": driver.getApiStatus()})
response.body.add({"auth-type": driver.getApiAuthType()})
endpoints = {}
endpoints['base'] = driver.getApiBaseEndpoint()
endpoints['catalog'] = driver.getApiCatalogEndpoint()
endpoints['auth'] = driver.getApiAuthEndpoint()
response.body.add({'endpoints': endpoints})
options = {}
options['authenticatable'] = driver.isApiAuthenticated()
options['authentication-separate'] = driver.isApiAuthenticationSeparateRequest()
options['cataloggable'] = driver.isApiCataloggable()
options['tokenable'] = driver.isApiTokenable()
response.body.add({'options': options})
return Response(status=200, body=response.getResponseBody())
|
[
"def",
"get_driver_api_status",
"(",
"driver",
")",
":",
"# noqa: E501",
"response",
"=",
"errorIfUnauthorized",
"(",
"role",
"=",
"'developer'",
")",
"if",
"response",
":",
"return",
"response",
"else",
":",
"response",
"=",
"ApitaxResponse",
"(",
")",
"driver",
":",
"Driver",
"=",
"LoadedDrivers",
".",
"getDriver",
"(",
"driver",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"\"format\"",
":",
"driver",
".",
"getApiFormat",
"(",
")",
"}",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"\"description\"",
":",
"driver",
".",
"getApiDescription",
"(",
")",
"}",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"\"status\"",
":",
"driver",
".",
"getApiStatus",
"(",
")",
"}",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"\"auth-type\"",
":",
"driver",
".",
"getApiAuthType",
"(",
")",
"}",
")",
"endpoints",
"=",
"{",
"}",
"endpoints",
"[",
"'base'",
"]",
"=",
"driver",
".",
"getApiBaseEndpoint",
"(",
")",
"endpoints",
"[",
"'catalog'",
"]",
"=",
"driver",
".",
"getApiCatalogEndpoint",
"(",
")",
"endpoints",
"[",
"'auth'",
"]",
"=",
"driver",
".",
"getApiAuthEndpoint",
"(",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"'endpoints'",
":",
"endpoints",
"}",
")",
"options",
"=",
"{",
"}",
"options",
"[",
"'authenticatable'",
"]",
"=",
"driver",
".",
"isApiAuthenticated",
"(",
")",
"options",
"[",
"'authentication-separate'",
"]",
"=",
"driver",
".",
"isApiAuthenticationSeparateRequest",
"(",
")",
"options",
"[",
"'cataloggable'",
"]",
"=",
"driver",
".",
"isApiCataloggable",
"(",
")",
"options",
"[",
"'tokenable'",
"]",
"=",
"driver",
".",
"isApiTokenable",
"(",
")",
"response",
".",
"body",
".",
"add",
"(",
"{",
"'options'",
":",
"options",
"}",
")",
"return",
"Response",
"(",
"status",
"=",
"200",
",",
"body",
"=",
"response",
".",
"getResponseBody",
"(",
")",
")"
] |
Retrieve the status of an api backing a driver
Retrieve the status of an api backing a driver # noqa: E501
:param driver: The driver to use for the request. ie. github
:type driver: str
:rtype: Response
|
[
"Retrieve",
"the",
"status",
"of",
"an",
"api",
"backing",
"a",
"driver"
] |
3883e45f17e01eba4edac9d1bba42f0e7a748682
|
https://github.com/Apitax/Apitax/blob/3883e45f17e01eba4edac9d1bba42f0e7a748682/apitax/api/controllers/api_controller.py#L40-L75
|
241,617
|
kejbaly2/idid
|
idid/logg.py
|
Logg._parse_engine
|
def _parse_engine(engine):
""" Parse the engine uri to determine where to store loggs """
engine = (engine or '').strip()
backend, path = URI_RE.match(engine).groups()
if backend not in SUPPORTED_BACKENDS:
raise NotImplementedError(
"Logg supports only {0} for now.".format(SUPPORTED_BACKENDS))
log.debug('Found engine: {0}'.format(engine))
return backend, path
|
python
|
def _parse_engine(engine):
""" Parse the engine uri to determine where to store loggs """
engine = (engine or '').strip()
backend, path = URI_RE.match(engine).groups()
if backend not in SUPPORTED_BACKENDS:
raise NotImplementedError(
"Logg supports only {0} for now.".format(SUPPORTED_BACKENDS))
log.debug('Found engine: {0}'.format(engine))
return backend, path
|
[
"def",
"_parse_engine",
"(",
"engine",
")",
":",
"engine",
"=",
"(",
"engine",
"or",
"''",
")",
".",
"strip",
"(",
")",
"backend",
",",
"path",
"=",
"URI_RE",
".",
"match",
"(",
"engine",
")",
".",
"groups",
"(",
")",
"if",
"backend",
"not",
"in",
"SUPPORTED_BACKENDS",
":",
"raise",
"NotImplementedError",
"(",
"\"Logg supports only {0} for now.\"",
".",
"format",
"(",
"SUPPORTED_BACKENDS",
")",
")",
"log",
".",
"debug",
"(",
"'Found engine: {0}'",
".",
"format",
"(",
"engine",
")",
")",
"return",
"backend",
",",
"path"
] |
Parse the engine uri to determine where to store loggs
|
[
"Parse",
"the",
"engine",
"uri",
"to",
"determine",
"where",
"to",
"store",
"loggs"
] |
0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c
|
https://github.com/kejbaly2/idid/blob/0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c/idid/logg.py#L199-L208
|
241,618
|
kejbaly2/idid
|
idid/logg.py
|
GitLogg._init_repo
|
def _init_repo(self):
""" create and initialize a new Git Repo """
log.debug("initializing new Git Repo: {0}".format(self._engine_path))
if os.path.exists(self._engine_path):
log.error("Path already exists! Aborting!")
raise RuntimeError
else:
# create the repo if it doesn't already exist
_logg_repo = git.Repo.init(path=self._engine_path, mkdir=True)
record = "idid Logg repo initialized on {0}".format(today())
c = _logg_repo.index.commit(record)
assert c.type == 'commit'
log.info('Created git repo [{0}]'.format(self._engine_path))
return _logg_repo
|
python
|
def _init_repo(self):
""" create and initialize a new Git Repo """
log.debug("initializing new Git Repo: {0}".format(self._engine_path))
if os.path.exists(self._engine_path):
log.error("Path already exists! Aborting!")
raise RuntimeError
else:
# create the repo if it doesn't already exist
_logg_repo = git.Repo.init(path=self._engine_path, mkdir=True)
record = "idid Logg repo initialized on {0}".format(today())
c = _logg_repo.index.commit(record)
assert c.type == 'commit'
log.info('Created git repo [{0}]'.format(self._engine_path))
return _logg_repo
|
[
"def",
"_init_repo",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"\"initializing new Git Repo: {0}\"",
".",
"format",
"(",
"self",
".",
"_engine_path",
")",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_engine_path",
")",
":",
"log",
".",
"error",
"(",
"\"Path already exists! Aborting!\"",
")",
"raise",
"RuntimeError",
"else",
":",
"# create the repo if it doesn't already exist",
"_logg_repo",
"=",
"git",
".",
"Repo",
".",
"init",
"(",
"path",
"=",
"self",
".",
"_engine_path",
",",
"mkdir",
"=",
"True",
")",
"record",
"=",
"\"idid Logg repo initialized on {0}\"",
".",
"format",
"(",
"today",
"(",
")",
")",
"c",
"=",
"_logg_repo",
".",
"index",
".",
"commit",
"(",
"record",
")",
"assert",
"c",
".",
"type",
"==",
"'commit'",
"log",
".",
"info",
"(",
"'Created git repo [{0}]'",
".",
"format",
"(",
"self",
".",
"_engine_path",
")",
")",
"return",
"_logg_repo"
] |
create and initialize a new Git Repo
|
[
"create",
"and",
"initialize",
"a",
"new",
"Git",
"Repo"
] |
0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c
|
https://github.com/kejbaly2/idid/blob/0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c/idid/logg.py#L366-L379
|
241,619
|
kejbaly2/idid
|
idid/logg.py
|
GitLogg._load_repo
|
def _load_repo(self):
""" Load git repo using GitPython """
if self._logg_repo:
return self._logg_repo
try:
_logg_repo = git.Repo(self._engine_path)
log.debug('Loaded git repo [{0}]'.format(self._engine_path))
except Exception:
# FIXME: should this be automatic?
# log.error("Git repo doesn't exist! run ``idid init``")
_logg_repo = self._init_repo()
return _logg_repo
|
python
|
def _load_repo(self):
""" Load git repo using GitPython """
if self._logg_repo:
return self._logg_repo
try:
_logg_repo = git.Repo(self._engine_path)
log.debug('Loaded git repo [{0}]'.format(self._engine_path))
except Exception:
# FIXME: should this be automatic?
# log.error("Git repo doesn't exist! run ``idid init``")
_logg_repo = self._init_repo()
return _logg_repo
|
[
"def",
"_load_repo",
"(",
"self",
")",
":",
"if",
"self",
".",
"_logg_repo",
":",
"return",
"self",
".",
"_logg_repo",
"try",
":",
"_logg_repo",
"=",
"git",
".",
"Repo",
"(",
"self",
".",
"_engine_path",
")",
"log",
".",
"debug",
"(",
"'Loaded git repo [{0}]'",
".",
"format",
"(",
"self",
".",
"_engine_path",
")",
")",
"except",
"Exception",
":",
"# FIXME: should this be automatic?",
"# log.error(\"Git repo doesn't exist! run ``idid init``\")",
"_logg_repo",
"=",
"self",
".",
"_init_repo",
"(",
")",
"return",
"_logg_repo"
] |
Load git repo using GitPython
|
[
"Load",
"git",
"repo",
"using",
"GitPython"
] |
0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c
|
https://github.com/kejbaly2/idid/blob/0f19e9ca9c8fa4a81e95c490dfbbc1b452c7451c/idid/logg.py#L381-L393
|
241,620
|
diffeo/yakonfig
|
yakonfig/cmd.py
|
ArgParseCmd.add_arguments
|
def add_arguments(self, parser):
'''Add generic command-line arguments to a top-level argparse parser.
After running this, the results from ``argparse.parse_args()``
can be passed to :meth:`main`.
'''
commands = set(name[3:] for name in dir(self) if name.startswith('do_'))
parser.add_argument('action', help='action to run', nargs='?',
choices=list(commands))
parser.add_argument('arguments', help='arguments specific to ACTION',
nargs=argparse.REMAINDER)
|
python
|
def add_arguments(self, parser):
'''Add generic command-line arguments to a top-level argparse parser.
After running this, the results from ``argparse.parse_args()``
can be passed to :meth:`main`.
'''
commands = set(name[3:] for name in dir(self) if name.startswith('do_'))
parser.add_argument('action', help='action to run', nargs='?',
choices=list(commands))
parser.add_argument('arguments', help='arguments specific to ACTION',
nargs=argparse.REMAINDER)
|
[
"def",
"add_arguments",
"(",
"self",
",",
"parser",
")",
":",
"commands",
"=",
"set",
"(",
"name",
"[",
"3",
":",
"]",
"for",
"name",
"in",
"dir",
"(",
"self",
")",
"if",
"name",
".",
"startswith",
"(",
"'do_'",
")",
")",
"parser",
".",
"add_argument",
"(",
"'action'",
",",
"help",
"=",
"'action to run'",
",",
"nargs",
"=",
"'?'",
",",
"choices",
"=",
"list",
"(",
"commands",
")",
")",
"parser",
".",
"add_argument",
"(",
"'arguments'",
",",
"help",
"=",
"'arguments specific to ACTION'",
",",
"nargs",
"=",
"argparse",
".",
"REMAINDER",
")"
] |
Add generic command-line arguments to a top-level argparse parser.
After running this, the results from ``argparse.parse_args()``
can be passed to :meth:`main`.
|
[
"Add",
"generic",
"command",
"-",
"line",
"arguments",
"to",
"a",
"top",
"-",
"level",
"argparse",
"parser",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/cmd.py#L84-L95
|
241,621
|
diffeo/yakonfig
|
yakonfig/cmd.py
|
ArgParseCmd.main
|
def main(self, args):
'''Run a single command, or else the main shell loop.
`args` should be the :class:`argparse.Namespace` object after
being set up via :meth:`add_arguments`.
'''
if args.action:
self.runcmd(args.action, args.arguments)
else:
self.cmdloop()
|
python
|
def main(self, args):
'''Run a single command, or else the main shell loop.
`args` should be the :class:`argparse.Namespace` object after
being set up via :meth:`add_arguments`.
'''
if args.action:
self.runcmd(args.action, args.arguments)
else:
self.cmdloop()
|
[
"def",
"main",
"(",
"self",
",",
"args",
")",
":",
"if",
"args",
".",
"action",
":",
"self",
".",
"runcmd",
"(",
"args",
".",
"action",
",",
"args",
".",
"arguments",
")",
"else",
":",
"self",
".",
"cmdloop",
"(",
")"
] |
Run a single command, or else the main shell loop.
`args` should be the :class:`argparse.Namespace` object after
being set up via :meth:`add_arguments`.
|
[
"Run",
"a",
"single",
"command",
"or",
"else",
"the",
"main",
"shell",
"loop",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/cmd.py#L97-L107
|
241,622
|
diffeo/yakonfig
|
yakonfig/cmd.py
|
ArgParseCmd.runcmd
|
def runcmd(self, cmd, args):
'''Run a single command from pre-parsed arguments.
This is intended to be run from :meth:`main` or somewhere else
"at the top level" of the program. It may raise
:exc:`exceptions.SystemExit` if an argument such as ``--help``
that normally causes execution to stop is encountered.
'''
dof = getattr(self, 'do_' + cmd, None)
if dof is None:
return self.default(' '.join([cmd] + args))
argf = getattr(self, 'args_' + cmd, None)
if argf is not None:
parser = argparse.ArgumentParser(
prog=cmd,
description=getattr(dof, '__doc__', None))
argf(parser)
argl = parser.parse_args(args)
else:
argl = ' '.join(args)
return dof(argl)
|
python
|
def runcmd(self, cmd, args):
'''Run a single command from pre-parsed arguments.
This is intended to be run from :meth:`main` or somewhere else
"at the top level" of the program. It may raise
:exc:`exceptions.SystemExit` if an argument such as ``--help``
that normally causes execution to stop is encountered.
'''
dof = getattr(self, 'do_' + cmd, None)
if dof is None:
return self.default(' '.join([cmd] + args))
argf = getattr(self, 'args_' + cmd, None)
if argf is not None:
parser = argparse.ArgumentParser(
prog=cmd,
description=getattr(dof, '__doc__', None))
argf(parser)
argl = parser.parse_args(args)
else:
argl = ' '.join(args)
return dof(argl)
|
[
"def",
"runcmd",
"(",
"self",
",",
"cmd",
",",
"args",
")",
":",
"dof",
"=",
"getattr",
"(",
"self",
",",
"'do_'",
"+",
"cmd",
",",
"None",
")",
"if",
"dof",
"is",
"None",
":",
"return",
"self",
".",
"default",
"(",
"' '",
".",
"join",
"(",
"[",
"cmd",
"]",
"+",
"args",
")",
")",
"argf",
"=",
"getattr",
"(",
"self",
",",
"'args_'",
"+",
"cmd",
",",
"None",
")",
"if",
"argf",
"is",
"not",
"None",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"cmd",
",",
"description",
"=",
"getattr",
"(",
"dof",
",",
"'__doc__'",
",",
"None",
")",
")",
"argf",
"(",
"parser",
")",
"argl",
"=",
"parser",
".",
"parse_args",
"(",
"args",
")",
"else",
":",
"argl",
"=",
"' '",
".",
"join",
"(",
"args",
")",
"return",
"dof",
"(",
"argl",
")"
] |
Run a single command from pre-parsed arguments.
This is intended to be run from :meth:`main` or somewhere else
"at the top level" of the program. It may raise
:exc:`exceptions.SystemExit` if an argument such as ``--help``
that normally causes execution to stop is encountered.
|
[
"Run",
"a",
"single",
"command",
"from",
"pre",
"-",
"parsed",
"arguments",
"."
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/cmd.py#L109-L130
|
241,623
|
diffeo/yakonfig
|
yakonfig/cmd.py
|
ArgParseCmd.do_help
|
def do_help(self, args):
'''print help on a command'''
if args.command:
f = getattr(self, 'help_' + args.command, None)
if f:
f()
return
f = getattr(self, 'do_' + args.command, None)
if not f:
msg = self.nohelp % (args.command,)
self.stdout.write('{0}\n'.format(msg))
return
docstr = getattr(f, '__doc__', None)
f = getattr(self, 'args_' + args.command, None)
if f:
parser = argparse.ArgumentParser(
prog=args.command,
description=docstr)
f(parser)
parser.print_help(file=self.stdout)
else:
if not docstr:
docstr = self.nohelp % (args.command,)
self.stdout.write('{0}\n'.format(docstr))
else:
Cmd.do_help(self, '')
|
python
|
def do_help(self, args):
'''print help on a command'''
if args.command:
f = getattr(self, 'help_' + args.command, None)
if f:
f()
return
f = getattr(self, 'do_' + args.command, None)
if not f:
msg = self.nohelp % (args.command,)
self.stdout.write('{0}\n'.format(msg))
return
docstr = getattr(f, '__doc__', None)
f = getattr(self, 'args_' + args.command, None)
if f:
parser = argparse.ArgumentParser(
prog=args.command,
description=docstr)
f(parser)
parser.print_help(file=self.stdout)
else:
if not docstr:
docstr = self.nohelp % (args.command,)
self.stdout.write('{0}\n'.format(docstr))
else:
Cmd.do_help(self, '')
|
[
"def",
"do_help",
"(",
"self",
",",
"args",
")",
":",
"if",
"args",
".",
"command",
":",
"f",
"=",
"getattr",
"(",
"self",
",",
"'help_'",
"+",
"args",
".",
"command",
",",
"None",
")",
"if",
"f",
":",
"f",
"(",
")",
"return",
"f",
"=",
"getattr",
"(",
"self",
",",
"'do_'",
"+",
"args",
".",
"command",
",",
"None",
")",
"if",
"not",
"f",
":",
"msg",
"=",
"self",
".",
"nohelp",
"%",
"(",
"args",
".",
"command",
",",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0}\\n'",
".",
"format",
"(",
"msg",
")",
")",
"return",
"docstr",
"=",
"getattr",
"(",
"f",
",",
"'__doc__'",
",",
"None",
")",
"f",
"=",
"getattr",
"(",
"self",
",",
"'args_'",
"+",
"args",
".",
"command",
",",
"None",
")",
"if",
"f",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"prog",
"=",
"args",
".",
"command",
",",
"description",
"=",
"docstr",
")",
"f",
"(",
"parser",
")",
"parser",
".",
"print_help",
"(",
"file",
"=",
"self",
".",
"stdout",
")",
"else",
":",
"if",
"not",
"docstr",
":",
"docstr",
"=",
"self",
".",
"nohelp",
"%",
"(",
"args",
".",
"command",
",",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"'{0}\\n'",
".",
"format",
"(",
"docstr",
")",
")",
"else",
":",
"Cmd",
".",
"do_help",
"(",
"self",
",",
"''",
")"
] |
print help on a command
|
[
"print",
"help",
"on",
"a",
"command"
] |
412e195da29b4f4fc7b72967c192714a6f5eaeb5
|
https://github.com/diffeo/yakonfig/blob/412e195da29b4f4fc7b72967c192714a6f5eaeb5/yakonfig/cmd.py#L156-L183
|
241,624
|
inveniosoftware-attic/invenio-knowledge
|
invenio_knowledge/utils.py
|
load_kb_mappings_file
|
def load_kb_mappings_file(kbname, kbfile, separator):
"""Add KB values from file to given KB returning rows added."""
num_added = 0
with open(kbfile) as kb_fd:
for line in kb_fd:
if not line.strip():
continue
try:
key, value = line.split(separator)
except ValueError:
# bad split, pass
current_app.logger.error("Error splitting: {0}".format(line))
continue
add_kb_mapping(kbname, key, value)
num_added += 1
return num_added
|
python
|
def load_kb_mappings_file(kbname, kbfile, separator):
"""Add KB values from file to given KB returning rows added."""
num_added = 0
with open(kbfile) as kb_fd:
for line in kb_fd:
if not line.strip():
continue
try:
key, value = line.split(separator)
except ValueError:
# bad split, pass
current_app.logger.error("Error splitting: {0}".format(line))
continue
add_kb_mapping(kbname, key, value)
num_added += 1
return num_added
|
[
"def",
"load_kb_mappings_file",
"(",
"kbname",
",",
"kbfile",
",",
"separator",
")",
":",
"num_added",
"=",
"0",
"with",
"open",
"(",
"kbfile",
")",
"as",
"kb_fd",
":",
"for",
"line",
"in",
"kb_fd",
":",
"if",
"not",
"line",
".",
"strip",
"(",
")",
":",
"continue",
"try",
":",
"key",
",",
"value",
"=",
"line",
".",
"split",
"(",
"separator",
")",
"except",
"ValueError",
":",
"# bad split, pass",
"current_app",
".",
"logger",
".",
"error",
"(",
"\"Error splitting: {0}\"",
".",
"format",
"(",
"line",
")",
")",
"continue",
"add_kb_mapping",
"(",
"kbname",
",",
"key",
",",
"value",
")",
"num_added",
"+=",
"1",
"return",
"num_added"
] |
Add KB values from file to given KB returning rows added.
|
[
"Add",
"KB",
"values",
"from",
"file",
"to",
"given",
"KB",
"returning",
"rows",
"added",
"."
] |
b31722dc14243ca8f626f8b3bce9718d0119de55
|
https://github.com/inveniosoftware-attic/invenio-knowledge/blob/b31722dc14243ca8f626f8b3bce9718d0119de55/invenio_knowledge/utils.py#L27-L42
|
241,625
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.by_id
|
def by_id(cls, semantictag_id, autoflush=True):
'''Return the semantic tag with the given id, or None.
:param semantictag_id: the id of the semantic tag to return
:type semantictag_id: string
:returns: the semantic tag with the given id, or None if there is no tag with
that id
:rtype: ckan.model.semantictag.SemanticTag # TODO check this
'''
query = meta.Session.query(SemanticTag).filter(SemanticTag.id==semantictag_id)
query = query.autoflush(autoflush)
semantictag = query.first()
return semantictag
|
python
|
def by_id(cls, semantictag_id, autoflush=True):
'''Return the semantic tag with the given id, or None.
:param semantictag_id: the id of the semantic tag to return
:type semantictag_id: string
:returns: the semantic tag with the given id, or None if there is no tag with
that id
:rtype: ckan.model.semantictag.SemanticTag # TODO check this
'''
query = meta.Session.query(SemanticTag).filter(SemanticTag.id==semantictag_id)
query = query.autoflush(autoflush)
semantictag = query.first()
return semantictag
|
[
"def",
"by_id",
"(",
"cls",
",",
"semantictag_id",
",",
"autoflush",
"=",
"True",
")",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
".",
"filter",
"(",
"SemanticTag",
".",
"id",
"==",
"semantictag_id",
")",
"query",
"=",
"query",
".",
"autoflush",
"(",
"autoflush",
")",
"semantictag",
"=",
"query",
".",
"first",
"(",
")",
"return",
"semantictag"
] |
Return the semantic tag with the given id, or None.
:param semantictag_id: the id of the semantic tag to return
:type semantictag_id: string
:returns: the semantic tag with the given id, or None if there is no tag with
that id
:rtype: ckan.model.semantictag.SemanticTag # TODO check this
|
[
"Return",
"the",
"semantic",
"tag",
"with",
"the",
"given",
"id",
"or",
"None",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L60-L74
|
241,626
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.by_URI
|
def by_URI(cls, URI, label=None, autoflush=True):
'''Return the semantic ag with the given URI, or None.
:param URI: the URI of the semantic tag to return
:type URI: string (URI format)
:param label: URI's label (optional, default: None)
:type label: string
:returns: the semantic tag object with the given id or URI, or None if there is
no tag with that id or name
:rtype: ckan.model.semantictag.SemanticTag #TODO check this
'''
if label:
query = meta.Session.query(SemanticTag).filter(SemanticTag.label==label)
else:
query = meta.Session.query(SemanticTag).filter(SemanticTag.URI==URI)
query = query.autoflush(autoflush)
semantictag = query.first()
return semantictag
|
python
|
def by_URI(cls, URI, label=None, autoflush=True):
'''Return the semantic ag with the given URI, or None.
:param URI: the URI of the semantic tag to return
:type URI: string (URI format)
:param label: URI's label (optional, default: None)
:type label: string
:returns: the semantic tag object with the given id or URI, or None if there is
no tag with that id or name
:rtype: ckan.model.semantictag.SemanticTag #TODO check this
'''
if label:
query = meta.Session.query(SemanticTag).filter(SemanticTag.label==label)
else:
query = meta.Session.query(SemanticTag).filter(SemanticTag.URI==URI)
query = query.autoflush(autoflush)
semantictag = query.first()
return semantictag
|
[
"def",
"by_URI",
"(",
"cls",
",",
"URI",
",",
"label",
"=",
"None",
",",
"autoflush",
"=",
"True",
")",
":",
"if",
"label",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
".",
"filter",
"(",
"SemanticTag",
".",
"label",
"==",
"label",
")",
"else",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
".",
"filter",
"(",
"SemanticTag",
".",
"URI",
"==",
"URI",
")",
"query",
"=",
"query",
".",
"autoflush",
"(",
"autoflush",
")",
"semantictag",
"=",
"query",
".",
"first",
"(",
")",
"return",
"semantictag"
] |
Return the semantic ag with the given URI, or None.
:param URI: the URI of the semantic tag to return
:type URI: string (URI format)
:param label: URI's label (optional, default: None)
:type label: string
:returns: the semantic tag object with the given id or URI, or None if there is
no tag with that id or name
:rtype: ckan.model.semantictag.SemanticTag #TODO check this
|
[
"Return",
"the",
"semantic",
"ag",
"with",
"the",
"given",
"URI",
"or",
"None",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L77-L96
|
241,627
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.get
|
def get(cls, tag_id_or_URI, label=None):
'''Return the tag with the given id or URI, or None.
:param tag_id_or_name: the id or name of the tag to return
:type tag_id_or_name: string
:returns: the tag object with the given id or name, or None if there is
no tag with that id or name
:rtype: ckan.model.tag.Tag
'''
# First try to get the tag by ID.
semantictag = SemanticTag.by_id(tag_id_or_URI)
if semantictag:
return semantictag
else:
semantictag = SemanticTag.by_URI(tag_id_or_URI)
return semantictag
|
python
|
def get(cls, tag_id_or_URI, label=None):
'''Return the tag with the given id or URI, or None.
:param tag_id_or_name: the id or name of the tag to return
:type tag_id_or_name: string
:returns: the tag object with the given id or name, or None if there is
no tag with that id or name
:rtype: ckan.model.tag.Tag
'''
# First try to get the tag by ID.
semantictag = SemanticTag.by_id(tag_id_or_URI)
if semantictag:
return semantictag
else:
semantictag = SemanticTag.by_URI(tag_id_or_URI)
return semantictag
|
[
"def",
"get",
"(",
"cls",
",",
"tag_id_or_URI",
",",
"label",
"=",
"None",
")",
":",
"# First try to get the tag by ID.",
"semantictag",
"=",
"SemanticTag",
".",
"by_id",
"(",
"tag_id_or_URI",
")",
"if",
"semantictag",
":",
"return",
"semantictag",
"else",
":",
"semantictag",
"=",
"SemanticTag",
".",
"by_URI",
"(",
"tag_id_or_URI",
")",
"return",
"semantictag"
] |
Return the tag with the given id or URI, or None.
:param tag_id_or_name: the id or name of the tag to return
:type tag_id_or_name: string
:returns: the tag object with the given id or name, or None if there is
no tag with that id or name
:rtype: ckan.model.tag.Tag
|
[
"Return",
"the",
"tag",
"with",
"the",
"given",
"id",
"or",
"URI",
"or",
"None",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L99-L117
|
241,628
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.search_by_URI
|
def search_by_URI(cls, search_term):
'''Return all tags whose URI or label contain a given string.
:param search_term: the string to search for in the URI or label names
:type search_term: string
:returns: a list of semantictags that match the search term
:rtype: list of ckan.model.semantictag.SemanticTag objects
'''
#TODO include label search
query = meta.Session.query(SemanticTag)
search_term = search_term.strip().lower()
query = query.filter(SemanticTag.URI.contains(search_term))
query = query.distinct().join(SemanticTag.tag_semantictags)
return query
|
python
|
def search_by_URI(cls, search_term):
'''Return all tags whose URI or label contain a given string.
:param search_term: the string to search for in the URI or label names
:type search_term: string
:returns: a list of semantictags that match the search term
:rtype: list of ckan.model.semantictag.SemanticTag objects
'''
#TODO include label search
query = meta.Session.query(SemanticTag)
search_term = search_term.strip().lower()
query = query.filter(SemanticTag.URI.contains(search_term))
query = query.distinct().join(SemanticTag.tag_semantictags)
return query
|
[
"def",
"search_by_URI",
"(",
"cls",
",",
"search_term",
")",
":",
"#TODO include label search",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
"search_term",
"=",
"search_term",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
"query",
"=",
"query",
".",
"filter",
"(",
"SemanticTag",
".",
"URI",
".",
"contains",
"(",
"search_term",
")",
")",
"query",
"=",
"query",
".",
"distinct",
"(",
")",
".",
"join",
"(",
"SemanticTag",
".",
"tag_semantictags",
")",
"return",
"query"
] |
Return all tags whose URI or label contain a given string.
:param search_term: the string to search for in the URI or label names
:type search_term: string
:returns: a list of semantictags that match the search term
:rtype: list of ckan.model.semantictag.SemanticTag objects
|
[
"Return",
"all",
"tags",
"whose",
"URI",
"or",
"label",
"contain",
"a",
"given",
"string",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L121-L136
|
241,629
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.all
|
def all(cls):
'''Return all tags that are currently applied to any dataset.
:returns: a list of all tags that are currently applied to any dataset
:rtype: list of ckan.model.tag.Tag objects
'''
# if vocab_id_or_name:
# vocab = vocabulary.Vocabulary.get(vocab_id_or_name)
# if vocab is None:
# # The user specified an invalid vocab.
# raise ckan.logic.NotFound("could not find vocabulary '%s'"
# % vocab_id_or_name)
# query = meta.Session.query(Tag).filter(Tag.vocabulary_id==vocab.id)
# else:
query = meta.Session.query(SemanticTag)
query = query.distinct().join(TagSemanticTag)
# query = query.filter_by(state='active')
return query
|
python
|
def all(cls):
'''Return all tags that are currently applied to any dataset.
:returns: a list of all tags that are currently applied to any dataset
:rtype: list of ckan.model.tag.Tag objects
'''
# if vocab_id_or_name:
# vocab = vocabulary.Vocabulary.get(vocab_id_or_name)
# if vocab is None:
# # The user specified an invalid vocab.
# raise ckan.logic.NotFound("could not find vocabulary '%s'"
# % vocab_id_or_name)
# query = meta.Session.query(Tag).filter(Tag.vocabulary_id==vocab.id)
# else:
query = meta.Session.query(SemanticTag)
query = query.distinct().join(TagSemanticTag)
# query = query.filter_by(state='active')
return query
|
[
"def",
"all",
"(",
"cls",
")",
":",
"#\t\tif vocab_id_or_name:",
"#\t\t\tvocab = vocabulary.Vocabulary.get(vocab_id_or_name)",
"#\t\t\tif vocab is None:",
"#\t\t\t\t# The user specified an invalid vocab.",
"#\t\t\t\traise ckan.logic.NotFound(\"could not find vocabulary '%s'\"",
"#\t\t\t\t\t\t% vocab_id_or_name)",
"#\t\t query = meta.Session.query(Tag).filter(Tag.vocabulary_id==vocab.id)",
"#\t\telse:",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"SemanticTag",
")",
"query",
"=",
"query",
".",
"distinct",
"(",
")",
".",
"join",
"(",
"TagSemanticTag",
")",
"#\t\tquery = query.filter_by(state='active')",
"return",
"query"
] |
Return all tags that are currently applied to any dataset.
:returns: a list of all tags that are currently applied to any dataset
:rtype: list of ckan.model.tag.Tag objects
|
[
"Return",
"all",
"tags",
"that",
"are",
"currently",
"applied",
"to",
"any",
"dataset",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L150-L168
|
241,630
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
SemanticTag.tags
|
def tags(self):
'''Return a list of all tags that have this semantic tag, sorted by name.
:rtype: list of ckan.model.tag.Tag objects
'''
q = meta.Session.query(_tag.Tag)
q = q.join(TagSemanticTag)
q = q.filter_by(tag_id=self.id)
# q = q.filter_by(state='active')
q = q.order_by(_tag.Tag.name)
tags = q.all()
return tags
|
python
|
def tags(self):
'''Return a list of all tags that have this semantic tag, sorted by name.
:rtype: list of ckan.model.tag.Tag objects
'''
q = meta.Session.query(_tag.Tag)
q = q.join(TagSemanticTag)
q = q.filter_by(tag_id=self.id)
# q = q.filter_by(state='active')
q = q.order_by(_tag.Tag.name)
tags = q.all()
return tags
|
[
"def",
"tags",
"(",
"self",
")",
":",
"q",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"_tag",
".",
"Tag",
")",
"q",
"=",
"q",
".",
"join",
"(",
"TagSemanticTag",
")",
"q",
"=",
"q",
".",
"filter_by",
"(",
"tag_id",
"=",
"self",
".",
"id",
")",
"#\t\tq = q.filter_by(state='active')",
"q",
"=",
"q",
".",
"order_by",
"(",
"_tag",
".",
"Tag",
".",
"name",
")",
"tags",
"=",
"q",
".",
"all",
"(",
")",
"return",
"tags"
] |
Return a list of all tags that have this semantic tag, sorted by name.
:rtype: list of ckan.model.tag.Tag objects
|
[
"Return",
"a",
"list",
"of",
"all",
"tags",
"that",
"have",
"this",
"semantic",
"tag",
"sorted",
"by",
"name",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L171-L183
|
241,631
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
Predicate.by_id
|
def by_id(cls, predicate_id, autoflush=True):
'''Return the predicate with the given id, or None.
:param predicate_id: the id of the predicate to return
:type predicate_id: string
:returns: the predicate with the given id, or None if there is no predicate with
that id
:rtype: ckan.model.semantictag.Predicate
'''
query = meta.Session.query(Predicate).filter(Predicate.id==predicate_id)
query = query.autoflush(autoflush)
return query.first()
|
python
|
def by_id(cls, predicate_id, autoflush=True):
'''Return the predicate with the given id, or None.
:param predicate_id: the id of the predicate to return
:type predicate_id: string
:returns: the predicate with the given id, or None if there is no predicate with
that id
:rtype: ckan.model.semantictag.Predicate
'''
query = meta.Session.query(Predicate).filter(Predicate.id==predicate_id)
query = query.autoflush(autoflush)
return query.first()
|
[
"def",
"by_id",
"(",
"cls",
",",
"predicate_id",
",",
"autoflush",
"=",
"True",
")",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"Predicate",
")",
".",
"filter",
"(",
"Predicate",
".",
"id",
"==",
"predicate_id",
")",
"query",
"=",
"query",
".",
"autoflush",
"(",
"autoflush",
")",
"return",
"query",
".",
"first",
"(",
")"
] |
Return the predicate with the given id, or None.
:param predicate_id: the id of the predicate to return
:type predicate_id: string
:returns: the predicate with the given id, or None if there is no predicate with
that id
:rtype: ckan.model.semantictag.Predicate
|
[
"Return",
"the",
"predicate",
"with",
"the",
"given",
"id",
"or",
"None",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L204-L217
|
241,632
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
Predicate.list_unique
|
def list_unique(cls):
'''Return all unique namespaces
:returns: a list of all predicates
:rtype: list of ckan.model.semantictag.Predicate objects
'''
query = meta.Session.query(Predicate).distinct(Predicate.namespace)
return query.all()
|
python
|
def list_unique(cls):
'''Return all unique namespaces
:returns: a list of all predicates
:rtype: list of ckan.model.semantictag.Predicate objects
'''
query = meta.Session.query(Predicate).distinct(Predicate.namespace)
return query.all()
|
[
"def",
"list_unique",
"(",
"cls",
")",
":",
"query",
"=",
"meta",
".",
"Session",
".",
"query",
"(",
"Predicate",
")",
".",
"distinct",
"(",
"Predicate",
".",
"namespace",
")",
"return",
"query",
".",
"all",
"(",
")"
] |
Return all unique namespaces
:returns: a list of all predicates
:rtype: list of ckan.model.semantictag.Predicate objects
|
[
"Return",
"all",
"unique",
"namespaces"
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L252-L260
|
241,633
|
alantygel/ckanext-semantictags
|
ckanext/semantictags/db.py
|
TagSemanticTag.by_name
|
def by_name(self, tag_name, semantictag_URI,
autoflush=True):
'''Return the TagSemanticTag for the given tag name and semantic tag URI, or None.
:param tag_name: the name of the tag to look for
:type tag_name: string
:param tag_URI: the name of the tag to look for
:type tag_URI: string
:returns: the TagSemanticTag for the given tag name and semantic tag URI, or None
if there is no TagSemanticTag for those semantic tag and tag names
:rtype: ckan.model.tag_semanictag.TagSemanticTag
'''
query = (meta.Session.query(TagSemanticTag)
.filter(_tag.Tag.name==tag_name)
.filter(SemanticTag.URI==semantictag_URI))
query = query.autoflush(autoflush)
return query.one()[0]
|
python
|
def by_name(self, tag_name, semantictag_URI,
autoflush=True):
'''Return the TagSemanticTag for the given tag name and semantic tag URI, or None.
:param tag_name: the name of the tag to look for
:type tag_name: string
:param tag_URI: the name of the tag to look for
:type tag_URI: string
:returns: the TagSemanticTag for the given tag name and semantic tag URI, or None
if there is no TagSemanticTag for those semantic tag and tag names
:rtype: ckan.model.tag_semanictag.TagSemanticTag
'''
query = (meta.Session.query(TagSemanticTag)
.filter(_tag.Tag.name==tag_name)
.filter(SemanticTag.URI==semantictag_URI))
query = query.autoflush(autoflush)
return query.one()[0]
|
[
"def",
"by_name",
"(",
"self",
",",
"tag_name",
",",
"semantictag_URI",
",",
"autoflush",
"=",
"True",
")",
":",
"query",
"=",
"(",
"meta",
".",
"Session",
".",
"query",
"(",
"TagSemanticTag",
")",
".",
"filter",
"(",
"_tag",
".",
"Tag",
".",
"name",
"==",
"tag_name",
")",
".",
"filter",
"(",
"SemanticTag",
".",
"URI",
"==",
"semantictag_URI",
")",
")",
"query",
"=",
"query",
".",
"autoflush",
"(",
"autoflush",
")",
"return",
"query",
".",
"one",
"(",
")",
"[",
"0",
"]"
] |
Return the TagSemanticTag for the given tag name and semantic tag URI, or None.
:param tag_name: the name of the tag to look for
:type tag_name: string
:param tag_URI: the name of the tag to look for
:type tag_URI: string
:returns: the TagSemanticTag for the given tag name and semantic tag URI, or None
if there is no TagSemanticTag for those semantic tag and tag names
:rtype: ckan.model.tag_semanictag.TagSemanticTag
|
[
"Return",
"the",
"TagSemanticTag",
"for",
"the",
"given",
"tag",
"name",
"and",
"semantic",
"tag",
"URI",
"or",
"None",
"."
] |
10bb31d29f34b2b5a6feae693961842f93007ce1
|
https://github.com/alantygel/ckanext-semantictags/blob/10bb31d29f34b2b5a6feae693961842f93007ce1/ckanext/semantictags/db.py#L305-L324
|
241,634
|
cabalgata/cabalgata-silla-de-montar
|
cabalgata/silla/util/disk.py
|
temp_directory
|
def temp_directory(*args, **kwargs):
"""
Context manager returns a path created by mkdtemp and cleans it up afterwards.
"""
path = tempfile.mkdtemp(*args, **kwargs)
try:
yield path
finally:
shutil.rmtree(path)
|
python
|
def temp_directory(*args, **kwargs):
"""
Context manager returns a path created by mkdtemp and cleans it up afterwards.
"""
path = tempfile.mkdtemp(*args, **kwargs)
try:
yield path
finally:
shutil.rmtree(path)
|
[
"def",
"temp_directory",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"try",
":",
"yield",
"path",
"finally",
":",
"shutil",
".",
"rmtree",
"(",
"path",
")"
] |
Context manager returns a path created by mkdtemp and cleans it up afterwards.
|
[
"Context",
"manager",
"returns",
"a",
"path",
"created",
"by",
"mkdtemp",
"and",
"cleans",
"it",
"up",
"afterwards",
"."
] |
6f1de56f207e55d788d56636f623c0e3ce1aa750
|
https://github.com/cabalgata/cabalgata-silla-de-montar/blob/6f1de56f207e55d788d56636f623c0e3ce1aa750/cabalgata/silla/util/disk.py#L27-L36
|
241,635
|
mjalas/messaging-client
|
messaging_client/option_parser.py
|
DefaultOptionParser.parse
|
def parse(self):
"""Parse command line arguments and options.
Returns:
Dictionary containing all given command line arguments and options.
"""
(options, args) = self.parser.parse_args()
self._set_attributes(args, options)
return self._create_dictionary()
|
python
|
def parse(self):
"""Parse command line arguments and options.
Returns:
Dictionary containing all given command line arguments and options.
"""
(options, args) = self.parser.parse_args()
self._set_attributes(args, options)
return self._create_dictionary()
|
[
"def",
"parse",
"(",
"self",
")",
":",
"(",
"options",
",",
"args",
")",
"=",
"self",
".",
"parser",
".",
"parse_args",
"(",
")",
"self",
".",
"_set_attributes",
"(",
"args",
",",
"options",
")",
"return",
"self",
".",
"_create_dictionary",
"(",
")"
] |
Parse command line arguments and options.
Returns:
Dictionary containing all given command line arguments and options.
|
[
"Parse",
"command",
"line",
"arguments",
"and",
"options",
"."
] |
b72ad622d9c94a879fe1085f0dbb52349892cd15
|
https://github.com/mjalas/messaging-client/blob/b72ad622d9c94a879fe1085f0dbb52349892cd15/messaging_client/option_parser.py#L58-L66
|
241,636
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
WinDir.prt_detail
|
def prt_detail(self):
"""Nicely print stats information.
"""
screen = [
"Detail info of %s: " % self.abspath,
"total size = %s" % string_SizeInBytes(self.size_total),
"number of sub folders = %s" % self.num_folder_total,
"number of total files = %s" % self.num_file_total,
"lvl 1 file size = %s" % string_SizeInBytes(self.size_current),
"lvl 1 folder number = %s" % self.num_folder_current,
"lvl 1 file number = %s" % self.num_file_current,
]
print("\n".join(screen))
|
python
|
def prt_detail(self):
"""Nicely print stats information.
"""
screen = [
"Detail info of %s: " % self.abspath,
"total size = %s" % string_SizeInBytes(self.size_total),
"number of sub folders = %s" % self.num_folder_total,
"number of total files = %s" % self.num_file_total,
"lvl 1 file size = %s" % string_SizeInBytes(self.size_current),
"lvl 1 folder number = %s" % self.num_folder_current,
"lvl 1 file number = %s" % self.num_file_current,
]
print("\n".join(screen))
|
[
"def",
"prt_detail",
"(",
"self",
")",
":",
"screen",
"=",
"[",
"\"Detail info of %s: \"",
"%",
"self",
".",
"abspath",
",",
"\"total size = %s\"",
"%",
"string_SizeInBytes",
"(",
"self",
".",
"size_total",
")",
",",
"\"number of sub folders = %s\"",
"%",
"self",
".",
"num_folder_total",
",",
"\"number of total files = %s\"",
"%",
"self",
".",
"num_file_total",
",",
"\"lvl 1 file size = %s\"",
"%",
"string_SizeInBytes",
"(",
"self",
".",
"size_current",
")",
",",
"\"lvl 1 folder number = %s\"",
"%",
"self",
".",
"num_folder_current",
",",
"\"lvl 1 file number = %s\"",
"%",
"self",
".",
"num_file_current",
",",
"]",
"print",
"(",
"\"\\n\"",
".",
"join",
"(",
"screen",
")",
")"
] |
Nicely print stats information.
|
[
"Nicely",
"print",
"stats",
"information",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L411-L423
|
241,637
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
FileCollection.add
|
def add(self, abspath_or_winfile, enable_verbose=True):
"""Add absolute path or WinFile to FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
if abspath_or_winfile in self.files:
if enable_verbose:
print("'%s' already in this collections" %
abspath_or_winfile)
else:
self.files.setdefault(abspath_or_winfile, WinFile(abspath_or_winfile))
elif isinstance(abspath_or_winfile, WinFile): # WinFile
if abspath_or_winfile.abspath in self.files:
if enable_verbose:
print("'%s' already in this collections" %
abspath_or_winfile)
else:
self.files.setdefault(abspath_or_winfile.abspath, abspath_or_winfile)
else:
raise TypeError
|
python
|
def add(self, abspath_or_winfile, enable_verbose=True):
"""Add absolute path or WinFile to FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
if abspath_or_winfile in self.files:
if enable_verbose:
print("'%s' already in this collections" %
abspath_or_winfile)
else:
self.files.setdefault(abspath_or_winfile, WinFile(abspath_or_winfile))
elif isinstance(abspath_or_winfile, WinFile): # WinFile
if abspath_or_winfile.abspath in self.files:
if enable_verbose:
print("'%s' already in this collections" %
abspath_or_winfile)
else:
self.files.setdefault(abspath_or_winfile.abspath, abspath_or_winfile)
else:
raise TypeError
|
[
"def",
"add",
"(",
"self",
",",
"abspath_or_winfile",
",",
"enable_verbose",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"abspath_or_winfile",
",",
"str",
")",
":",
"# abspath",
"if",
"abspath_or_winfile",
"in",
"self",
".",
"files",
":",
"if",
"enable_verbose",
":",
"print",
"(",
"\"'%s' already in this collections\"",
"%",
"abspath_or_winfile",
")",
"else",
":",
"self",
".",
"files",
".",
"setdefault",
"(",
"abspath_or_winfile",
",",
"WinFile",
"(",
"abspath_or_winfile",
")",
")",
"elif",
"isinstance",
"(",
"abspath_or_winfile",
",",
"WinFile",
")",
":",
"# WinFile",
"if",
"abspath_or_winfile",
".",
"abspath",
"in",
"self",
".",
"files",
":",
"if",
"enable_verbose",
":",
"print",
"(",
"\"'%s' already in this collections\"",
"%",
"abspath_or_winfile",
")",
"else",
":",
"self",
".",
"files",
".",
"setdefault",
"(",
"abspath_or_winfile",
".",
"abspath",
",",
"abspath_or_winfile",
")",
"else",
":",
"raise",
"TypeError"
] |
Add absolute path or WinFile to FileCollection.
|
[
"Add",
"absolute",
"path",
"or",
"WinFile",
"to",
"FileCollection",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L505-L523
|
241,638
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
FileCollection.remove
|
def remove(self, abspath_or_winfile, enable_verbose=True):
"""Remove absolute path or WinFile from FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
try:
del self.files[abspath_or_winfile]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
elif isinstance(abspath_or_winfile, WinFile): # WinFile
try:
del self.files[abspath_or_winfile.abspath]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
else:
raise TypeError
|
python
|
def remove(self, abspath_or_winfile, enable_verbose=True):
"""Remove absolute path or WinFile from FileCollection.
"""
if isinstance(abspath_or_winfile, str): # abspath
try:
del self.files[abspath_or_winfile]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
elif isinstance(abspath_or_winfile, WinFile): # WinFile
try:
del self.files[abspath_or_winfile.abspath]
except KeyError:
if enable_verbose:
print("'%s' are not in this file collections" %
abspath_or_winfile)
else:
raise TypeError
|
[
"def",
"remove",
"(",
"self",
",",
"abspath_or_winfile",
",",
"enable_verbose",
"=",
"True",
")",
":",
"if",
"isinstance",
"(",
"abspath_or_winfile",
",",
"str",
")",
":",
"# abspath",
"try",
":",
"del",
"self",
".",
"files",
"[",
"abspath_or_winfile",
"]",
"except",
"KeyError",
":",
"if",
"enable_verbose",
":",
"print",
"(",
"\"'%s' are not in this file collections\"",
"%",
"abspath_or_winfile",
")",
"elif",
"isinstance",
"(",
"abspath_or_winfile",
",",
"WinFile",
")",
":",
"# WinFile",
"try",
":",
"del",
"self",
".",
"files",
"[",
"abspath_or_winfile",
".",
"abspath",
"]",
"except",
"KeyError",
":",
"if",
"enable_verbose",
":",
"print",
"(",
"\"'%s' are not in this file collections\"",
"%",
"abspath_or_winfile",
")",
"else",
":",
"raise",
"TypeError"
] |
Remove absolute path or WinFile from FileCollection.
|
[
"Remove",
"absolute",
"path",
"or",
"WinFile",
"from",
"FileCollection",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L525-L543
|
241,639
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
FileCollection.iterfiles
|
def iterfiles(self):
"""Yield all WinFile object.
"""
try:
for path in self.order:
yield self.files[path]
except:
for winfile in self.files.values():
yield winfile
|
python
|
def iterfiles(self):
"""Yield all WinFile object.
"""
try:
for path in self.order:
yield self.files[path]
except:
for winfile in self.files.values():
yield winfile
|
[
"def",
"iterfiles",
"(",
"self",
")",
":",
"try",
":",
"for",
"path",
"in",
"self",
".",
"order",
":",
"yield",
"self",
".",
"files",
"[",
"path",
"]",
"except",
":",
"for",
"winfile",
"in",
"self",
".",
"files",
".",
"values",
"(",
")",
":",
"yield",
"winfile"
] |
Yield all WinFile object.
|
[
"Yield",
"all",
"WinFile",
"object",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L551-L559
|
241,640
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
FileCollection.iterpaths
|
def iterpaths(self):
"""Yield all WinFile's absolute path.
"""
try:
for path in self.order:
yield path
except:
for path in self.files:
yield path
|
python
|
def iterpaths(self):
"""Yield all WinFile's absolute path.
"""
try:
for path in self.order:
yield path
except:
for path in self.files:
yield path
|
[
"def",
"iterpaths",
"(",
"self",
")",
":",
"try",
":",
"for",
"path",
"in",
"self",
".",
"order",
":",
"yield",
"path",
"except",
":",
"for",
"path",
"in",
"self",
".",
"files",
":",
"yield",
"path"
] |
Yield all WinFile's absolute path.
|
[
"Yield",
"all",
"WinFile",
"s",
"absolute",
"path",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L561-L569
|
241,641
|
MacHu-GWU/angora-project
|
angora/filesystem/filesystem.py
|
FileCollection.show_patterned_file
|
def show_patterned_file(dir_path, pattern=list(), filename_only=True):
"""Print all file that file name contains ``pattern``.
"""
pattern = [i.lower() for i in pattern]
if filename_only:
def filter(winfile):
for p in pattern:
if p in winfile.fname.lower():
return True
return False
else:
def filter(winfile):
for p in pattern:
if p in winfile.abspath.lower():
return True
return False
fc = FileCollection.from_path_by_criterion(
dir_path, filter, keepboth=False)
if filename_only:
fc.sort_by("fname")
else:
fc.sort_by("abspath")
table = {p: "<%s>" % p for p in pattern}
lines = list()
lines.append("Results:")
for winfile in fc.iterfiles():
lines.append(" %s" % winfile)
if filename_only:
lines.append("Above are all files that file name contains %s" % pattern)
else:
lines.append("Above are all files that abspath contains %s" % pattern)
text = "\n".join(lines)
print(text)
with open("__show_patterned_file__.log", "wb") as f:
f.write(text.encode("utf-8"))
|
python
|
def show_patterned_file(dir_path, pattern=list(), filename_only=True):
"""Print all file that file name contains ``pattern``.
"""
pattern = [i.lower() for i in pattern]
if filename_only:
def filter(winfile):
for p in pattern:
if p in winfile.fname.lower():
return True
return False
else:
def filter(winfile):
for p in pattern:
if p in winfile.abspath.lower():
return True
return False
fc = FileCollection.from_path_by_criterion(
dir_path, filter, keepboth=False)
if filename_only:
fc.sort_by("fname")
else:
fc.sort_by("abspath")
table = {p: "<%s>" % p for p in pattern}
lines = list()
lines.append("Results:")
for winfile in fc.iterfiles():
lines.append(" %s" % winfile)
if filename_only:
lines.append("Above are all files that file name contains %s" % pattern)
else:
lines.append("Above are all files that abspath contains %s" % pattern)
text = "\n".join(lines)
print(text)
with open("__show_patterned_file__.log", "wb") as f:
f.write(text.encode("utf-8"))
|
[
"def",
"show_patterned_file",
"(",
"dir_path",
",",
"pattern",
"=",
"list",
"(",
")",
",",
"filename_only",
"=",
"True",
")",
":",
"pattern",
"=",
"[",
"i",
".",
"lower",
"(",
")",
"for",
"i",
"in",
"pattern",
"]",
"if",
"filename_only",
":",
"def",
"filter",
"(",
"winfile",
")",
":",
"for",
"p",
"in",
"pattern",
":",
"if",
"p",
"in",
"winfile",
".",
"fname",
".",
"lower",
"(",
")",
":",
"return",
"True",
"return",
"False",
"else",
":",
"def",
"filter",
"(",
"winfile",
")",
":",
"for",
"p",
"in",
"pattern",
":",
"if",
"p",
"in",
"winfile",
".",
"abspath",
".",
"lower",
"(",
")",
":",
"return",
"True",
"return",
"False",
"fc",
"=",
"FileCollection",
".",
"from_path_by_criterion",
"(",
"dir_path",
",",
"filter",
",",
"keepboth",
"=",
"False",
")",
"if",
"filename_only",
":",
"fc",
".",
"sort_by",
"(",
"\"fname\"",
")",
"else",
":",
"fc",
".",
"sort_by",
"(",
"\"abspath\"",
")",
"table",
"=",
"{",
"p",
":",
"\"<%s>\"",
"%",
"p",
"for",
"p",
"in",
"pattern",
"}",
"lines",
"=",
"list",
"(",
")",
"lines",
".",
"append",
"(",
"\"Results:\"",
")",
"for",
"winfile",
"in",
"fc",
".",
"iterfiles",
"(",
")",
":",
"lines",
".",
"append",
"(",
"\" %s\"",
"%",
"winfile",
")",
"if",
"filename_only",
":",
"lines",
".",
"append",
"(",
"\"Above are all files that file name contains %s\"",
"%",
"pattern",
")",
"else",
":",
"lines",
".",
"append",
"(",
"\"Above are all files that abspath contains %s\"",
"%",
"pattern",
")",
"text",
"=",
"\"\\n\"",
".",
"join",
"(",
"lines",
")",
"print",
"(",
"text",
")",
"with",
"open",
"(",
"\"__show_patterned_file__.log\"",
",",
"\"wb\"",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"text",
".",
"encode",
"(",
"\"utf-8\"",
")",
")"
] |
Print all file that file name contains ``pattern``.
|
[
"Print",
"all",
"file",
"that",
"file",
"name",
"contains",
"pattern",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/filesystem/filesystem.py#L1125-L1163
|
241,642
|
Naught0/lolrune
|
lolrune/aioruneclient.py
|
AioRuneClient._get
|
async def _get(self, url: str) -> str:
"""A small wrapper method which makes a quick GET request
Parameters
----------
url : str
The URL to get.
Returns
-------
str
The raw html of the requested page.
Raises
------
RuneConnectionError
If the GET response status is not 200.
"""
async with self.session.get(url, headers=self.HEADERS) as r:
if r.status == 200:
return await r.text()
else:
raise RuneConnectionError(r.status)
|
python
|
async def _get(self, url: str) -> str:
"""A small wrapper method which makes a quick GET request
Parameters
----------
url : str
The URL to get.
Returns
-------
str
The raw html of the requested page.
Raises
------
RuneConnectionError
If the GET response status is not 200.
"""
async with self.session.get(url, headers=self.HEADERS) as r:
if r.status == 200:
return await r.text()
else:
raise RuneConnectionError(r.status)
|
[
"async",
"def",
"_get",
"(",
"self",
",",
"url",
":",
"str",
")",
"->",
"str",
":",
"async",
"with",
"self",
".",
"session",
".",
"get",
"(",
"url",
",",
"headers",
"=",
"self",
".",
"HEADERS",
")",
"as",
"r",
":",
"if",
"r",
".",
"status",
"==",
"200",
":",
"return",
"await",
"r",
".",
"text",
"(",
")",
"else",
":",
"raise",
"RuneConnectionError",
"(",
"r",
".",
"status",
")"
] |
A small wrapper method which makes a quick GET request
Parameters
----------
url : str
The URL to get.
Returns
-------
str
The raw html of the requested page.
Raises
------
RuneConnectionError
If the GET response status is not 200.
|
[
"A",
"small",
"wrapper",
"method",
"which",
"makes",
"a",
"quick",
"GET",
"request"
] |
99f67b9137e42a78198ba369ceb371e473759f11
|
https://github.com/Naught0/lolrune/blob/99f67b9137e42a78198ba369ceb371e473759f11/lolrune/aioruneclient.py#L59-L81
|
241,643
|
redbridge/molnctrl
|
molnctrl/csobjects.py
|
Virtualmachine.update
|
def update(self):
""" Update the state """
vm = self._cs_api.list_virtualmachines(id=self.id)[0]
self.is_running = self._is_running(vm.state)
|
python
|
def update(self):
""" Update the state """
vm = self._cs_api.list_virtualmachines(id=self.id)[0]
self.is_running = self._is_running(vm.state)
|
[
"def",
"update",
"(",
"self",
")",
":",
"vm",
"=",
"self",
".",
"_cs_api",
".",
"list_virtualmachines",
"(",
"id",
"=",
"self",
".",
"id",
")",
"[",
"0",
"]",
"self",
".",
"is_running",
"=",
"self",
".",
"_is_running",
"(",
"vm",
".",
"state",
")"
] |
Update the state
|
[
"Update",
"the",
"state"
] |
9990ae7e522ce364bb61a735f774dc28de5f8e60
|
https://github.com/redbridge/molnctrl/blob/9990ae7e522ce364bb61a735f774dc28de5f8e60/molnctrl/csobjects.py#L155-L158
|
241,644
|
litters/shrew
|
shrew/utils/auth.py
|
unlock_keychain
|
def unlock_keychain(username):
""" If the user is running via SSH, their Keychain must be unlocked first. """
if 'SSH_TTY' not in os.environ:
return
# Don't unlock if we've already seen this user.
if username in _unlocked:
return
_unlocked.add(username)
if sys.platform == 'darwin':
sys.stderr.write("You are running under SSH. Please unlock your local OS X KeyChain:\n")
subprocess.call(['security', 'unlock-keychain'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
python
|
def unlock_keychain(username):
""" If the user is running via SSH, their Keychain must be unlocked first. """
if 'SSH_TTY' not in os.environ:
return
# Don't unlock if we've already seen this user.
if username in _unlocked:
return
_unlocked.add(username)
if sys.platform == 'darwin':
sys.stderr.write("You are running under SSH. Please unlock your local OS X KeyChain:\n")
subprocess.call(['security', 'unlock-keychain'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
[
"def",
"unlock_keychain",
"(",
"username",
")",
":",
"if",
"'SSH_TTY'",
"not",
"in",
"os",
".",
"environ",
":",
"return",
"# Don't unlock if we've already seen this user.",
"if",
"username",
"in",
"_unlocked",
":",
"return",
"_unlocked",
".",
"add",
"(",
"username",
")",
"if",
"sys",
".",
"platform",
"==",
"'darwin'",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"You are running under SSH. Please unlock your local OS X KeyChain:\\n\"",
")",
"subprocess",
".",
"call",
"(",
"[",
"'security'",
",",
"'unlock-keychain'",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")"
] |
If the user is running via SSH, their Keychain must be unlocked first.
|
[
"If",
"the",
"user",
"is",
"running",
"via",
"SSH",
"their",
"Keychain",
"must",
"be",
"unlocked",
"first",
"."
] |
ed4b1879321d858d6bc884d14fea7557372a4d41
|
https://github.com/litters/shrew/blob/ed4b1879321d858d6bc884d14fea7557372a4d41/shrew/utils/auth.py#L133-L147
|
241,645
|
litters/shrew
|
shrew/utils/auth.py
|
save_password
|
def save_password(entry, password, username=None):
"""
Saves the given password in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param password: The password to save in the keychain.
:param username: The username to get the password for. Default is the current user.
"""
if username is None:
username = get_username()
has_keychain = initialize_keychain()
if has_keychain:
try:
keyring.set_password(entry, username, password)
except Exception as e:
log.warn("Unable to set password in keyring. Continuing..")
log.debug(e)
|
python
|
def save_password(entry, password, username=None):
"""
Saves the given password in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param password: The password to save in the keychain.
:param username: The username to get the password for. Default is the current user.
"""
if username is None:
username = get_username()
has_keychain = initialize_keychain()
if has_keychain:
try:
keyring.set_password(entry, username, password)
except Exception as e:
log.warn("Unable to set password in keyring. Continuing..")
log.debug(e)
|
[
"def",
"save_password",
"(",
"entry",
",",
"password",
",",
"username",
"=",
"None",
")",
":",
"if",
"username",
"is",
"None",
":",
"username",
"=",
"get_username",
"(",
")",
"has_keychain",
"=",
"initialize_keychain",
"(",
")",
"if",
"has_keychain",
":",
"try",
":",
"keyring",
".",
"set_password",
"(",
"entry",
",",
"username",
",",
"password",
")",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"warn",
"(",
"\"Unable to set password in keyring. Continuing..\"",
")",
"log",
".",
"debug",
"(",
"e",
")"
] |
Saves the given password in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param password: The password to save in the keychain.
:param username: The username to get the password for. Default is the current user.
|
[
"Saves",
"the",
"given",
"password",
"in",
"the",
"user",
"s",
"keychain",
"."
] |
ed4b1879321d858d6bc884d14fea7557372a4d41
|
https://github.com/litters/shrew/blob/ed4b1879321d858d6bc884d14fea7557372a4d41/shrew/utils/auth.py#L150-L169
|
241,646
|
litters/shrew
|
shrew/utils/auth.py
|
remove_password
|
def remove_password(entry, username=None):
"""
Removes the password for the specific user in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param username: The username whose password is to be removed. Default is the current user.
"""
if username is None:
username = get_username()
has_keychain = initialize_keychain()
if has_keychain:
try:
keyring.delete_password(entry, username)
except Exception as e:
print e
log.warn("Unable to delete password in keyring. Continuing..")
log.debug(e)
|
python
|
def remove_password(entry, username=None):
"""
Removes the password for the specific user in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param username: The username whose password is to be removed. Default is the current user.
"""
if username is None:
username = get_username()
has_keychain = initialize_keychain()
if has_keychain:
try:
keyring.delete_password(entry, username)
except Exception as e:
print e
log.warn("Unable to delete password in keyring. Continuing..")
log.debug(e)
|
[
"def",
"remove_password",
"(",
"entry",
",",
"username",
"=",
"None",
")",
":",
"if",
"username",
"is",
"None",
":",
"username",
"=",
"get_username",
"(",
")",
"has_keychain",
"=",
"initialize_keychain",
"(",
")",
"if",
"has_keychain",
":",
"try",
":",
"keyring",
".",
"delete_password",
"(",
"entry",
",",
"username",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"e",
"log",
".",
"warn",
"(",
"\"Unable to delete password in keyring. Continuing..\"",
")",
"log",
".",
"debug",
"(",
"e",
")"
] |
Removes the password for the specific user in the user's keychain.
:param entry: The entry in the keychain. This is a caller specific key.
:param username: The username whose password is to be removed. Default is the current user.
|
[
"Removes",
"the",
"password",
"for",
"the",
"specific",
"user",
"in",
"the",
"user",
"s",
"keychain",
"."
] |
ed4b1879321d858d6bc884d14fea7557372a4d41
|
https://github.com/litters/shrew/blob/ed4b1879321d858d6bc884d14fea7557372a4d41/shrew/utils/auth.py#L172-L191
|
241,647
|
litters/shrew
|
shrew/utils/auth.py
|
get_password
|
def get_password(entry=None, username=None, prompt=None, always_ask=False):
"""
Prompt the user for a password on stdin.
:param username: The username to get the password for. Default is the current user.
:param entry: The entry in the keychain. This is a caller specific key.
:param prompt: The entry in the keychain. This is a caller specific key.
:param always_ask: Force the user to enter the password every time.
"""
password = None
if username is None:
username = get_username()
has_keychain = initialize_keychain()
# Unlock the user's keychain otherwise, if running under SSH, 'security(1)' will thrown an error.
unlock_keychain(username)
if prompt is None:
prompt = "Enter %s's password: " % username
if has_keychain and entry is not None and always_ask is False:
password = get_password_from_keyring(entry, username)
if password is None:
password = getpass.getpass(prompt=prompt)
return password
|
python
|
def get_password(entry=None, username=None, prompt=None, always_ask=False):
"""
Prompt the user for a password on stdin.
:param username: The username to get the password for. Default is the current user.
:param entry: The entry in the keychain. This is a caller specific key.
:param prompt: The entry in the keychain. This is a caller specific key.
:param always_ask: Force the user to enter the password every time.
"""
password = None
if username is None:
username = get_username()
has_keychain = initialize_keychain()
# Unlock the user's keychain otherwise, if running under SSH, 'security(1)' will thrown an error.
unlock_keychain(username)
if prompt is None:
prompt = "Enter %s's password: " % username
if has_keychain and entry is not None and always_ask is False:
password = get_password_from_keyring(entry, username)
if password is None:
password = getpass.getpass(prompt=prompt)
return password
|
[
"def",
"get_password",
"(",
"entry",
"=",
"None",
",",
"username",
"=",
"None",
",",
"prompt",
"=",
"None",
",",
"always_ask",
"=",
"False",
")",
":",
"password",
"=",
"None",
"if",
"username",
"is",
"None",
":",
"username",
"=",
"get_username",
"(",
")",
"has_keychain",
"=",
"initialize_keychain",
"(",
")",
"# Unlock the user's keychain otherwise, if running under SSH, 'security(1)' will thrown an error.",
"unlock_keychain",
"(",
"username",
")",
"if",
"prompt",
"is",
"None",
":",
"prompt",
"=",
"\"Enter %s's password: \"",
"%",
"username",
"if",
"has_keychain",
"and",
"entry",
"is",
"not",
"None",
"and",
"always_ask",
"is",
"False",
":",
"password",
"=",
"get_password_from_keyring",
"(",
"entry",
",",
"username",
")",
"if",
"password",
"is",
"None",
":",
"password",
"=",
"getpass",
".",
"getpass",
"(",
"prompt",
"=",
"prompt",
")",
"return",
"password"
] |
Prompt the user for a password on stdin.
:param username: The username to get the password for. Default is the current user.
:param entry: The entry in the keychain. This is a caller specific key.
:param prompt: The entry in the keychain. This is a caller specific key.
:param always_ask: Force the user to enter the password every time.
|
[
"Prompt",
"the",
"user",
"for",
"a",
"password",
"on",
"stdin",
"."
] |
ed4b1879321d858d6bc884d14fea7557372a4d41
|
https://github.com/litters/shrew/blob/ed4b1879321d858d6bc884d14fea7557372a4d41/shrew/utils/auth.py#L194-L223
|
241,648
|
litters/shrew
|
shrew/utils/auth.py
|
validate_password
|
def validate_password(entry, username, check_function, password=None, retries=1, save_on_success=True, prompt=None, **check_args):
"""
Validate a password with a check function & retry if the password is incorrect.
Useful for after a user has changed their password in LDAP, but their local keychain entry is then out of sync.
:param str entry: The keychain entry to fetch a password from.
:param str username: The username to authenticate
:param func check_function: Check function to use. Should take (username, password, **check_args)
:param str password: The password to validate. If `None`, the user will be prompted.
:param int retries: Number of retries to prompt the user for.
:param bool save_on_success: Save the password if the validation was successful.
:param str prompt: Alternate prompt to use when asking for the user's password.
:returns: `True` on successful authentication. `False` otherwise.
:rtype: bool
"""
if password is None:
password = get_password(entry, username, prompt)
for _ in xrange(retries + 1):
if check_function(username, password, **check_args):
if save_on_success:
save_password(entry, password, username)
return True
log.error("Couldn't successfully authenticate your username & password..")
password = get_password(entry, username, prompt, always_ask=True)
return False
|
python
|
def validate_password(entry, username, check_function, password=None, retries=1, save_on_success=True, prompt=None, **check_args):
"""
Validate a password with a check function & retry if the password is incorrect.
Useful for after a user has changed their password in LDAP, but their local keychain entry is then out of sync.
:param str entry: The keychain entry to fetch a password from.
:param str username: The username to authenticate
:param func check_function: Check function to use. Should take (username, password, **check_args)
:param str password: The password to validate. If `None`, the user will be prompted.
:param int retries: Number of retries to prompt the user for.
:param bool save_on_success: Save the password if the validation was successful.
:param str prompt: Alternate prompt to use when asking for the user's password.
:returns: `True` on successful authentication. `False` otherwise.
:rtype: bool
"""
if password is None:
password = get_password(entry, username, prompt)
for _ in xrange(retries + 1):
if check_function(username, password, **check_args):
if save_on_success:
save_password(entry, password, username)
return True
log.error("Couldn't successfully authenticate your username & password..")
password = get_password(entry, username, prompt, always_ask=True)
return False
|
[
"def",
"validate_password",
"(",
"entry",
",",
"username",
",",
"check_function",
",",
"password",
"=",
"None",
",",
"retries",
"=",
"1",
",",
"save_on_success",
"=",
"True",
",",
"prompt",
"=",
"None",
",",
"*",
"*",
"check_args",
")",
":",
"if",
"password",
"is",
"None",
":",
"password",
"=",
"get_password",
"(",
"entry",
",",
"username",
",",
"prompt",
")",
"for",
"_",
"in",
"xrange",
"(",
"retries",
"+",
"1",
")",
":",
"if",
"check_function",
"(",
"username",
",",
"password",
",",
"*",
"*",
"check_args",
")",
":",
"if",
"save_on_success",
":",
"save_password",
"(",
"entry",
",",
"password",
",",
"username",
")",
"return",
"True",
"log",
".",
"error",
"(",
"\"Couldn't successfully authenticate your username & password..\"",
")",
"password",
"=",
"get_password",
"(",
"entry",
",",
"username",
",",
"prompt",
",",
"always_ask",
"=",
"True",
")",
"return",
"False"
] |
Validate a password with a check function & retry if the password is incorrect.
Useful for after a user has changed their password in LDAP, but their local keychain entry is then out of sync.
:param str entry: The keychain entry to fetch a password from.
:param str username: The username to authenticate
:param func check_function: Check function to use. Should take (username, password, **check_args)
:param str password: The password to validate. If `None`, the user will be prompted.
:param int retries: Number of retries to prompt the user for.
:param bool save_on_success: Save the password if the validation was successful.
:param str prompt: Alternate prompt to use when asking for the user's password.
:returns: `True` on successful authentication. `False` otherwise.
:rtype: bool
|
[
"Validate",
"a",
"password",
"with",
"a",
"check",
"function",
"&",
"retry",
"if",
"the",
"password",
"is",
"incorrect",
"."
] |
ed4b1879321d858d6bc884d14fea7557372a4d41
|
https://github.com/litters/shrew/blob/ed4b1879321d858d6bc884d14fea7557372a4d41/shrew/utils/auth.py#L250-L283
|
241,649
|
mayfield/shellish
|
shellish/rendering/traceback.py
|
format_exception
|
def format_exception(exc, indent=0, pad=' '):
""" Take an exception object and return a generator with vtml formatted
exception traceback lines. """
from_msg = None
if exc.__cause__ is not None:
indent += yield from format_exception(exc.__cause__, indent)
from_msg = traceback._cause_message.strip()
elif exc.__context__ is not None and not exc.__suppress_context__:
indent += yield from format_exception(exc.__context__, indent)
from_msg = traceback._context_message.strip()
padding = pad * indent
if from_msg:
yield '\n%s%s\n' % (padding, from_msg)
yield '%s<b><u>Traceback (most recent call last)</u></b>' % padding
tblist = traceback.extract_tb(exc.__traceback__)
tbdepth = len(tblist)
for x in tblist:
depth = '%d.' % tbdepth
yield '%s<dim>%-3s</dim> <cyan>File</cyan> "<blue>%s</blue>", ' \
'line <u>%d</u>, in <b>%s</b>' % (padding, depth, x.filename,
x.lineno, x.name)
yield '%s %s' % (padding, x.line)
tbdepth -= 1
yield '%s<b><red>%s</red>: %s</b>' % (padding, type(exc).__name__, exc)
return indent + 1
|
python
|
def format_exception(exc, indent=0, pad=' '):
""" Take an exception object and return a generator with vtml formatted
exception traceback lines. """
from_msg = None
if exc.__cause__ is not None:
indent += yield from format_exception(exc.__cause__, indent)
from_msg = traceback._cause_message.strip()
elif exc.__context__ is not None and not exc.__suppress_context__:
indent += yield from format_exception(exc.__context__, indent)
from_msg = traceback._context_message.strip()
padding = pad * indent
if from_msg:
yield '\n%s%s\n' % (padding, from_msg)
yield '%s<b><u>Traceback (most recent call last)</u></b>' % padding
tblist = traceback.extract_tb(exc.__traceback__)
tbdepth = len(tblist)
for x in tblist:
depth = '%d.' % tbdepth
yield '%s<dim>%-3s</dim> <cyan>File</cyan> "<blue>%s</blue>", ' \
'line <u>%d</u>, in <b>%s</b>' % (padding, depth, x.filename,
x.lineno, x.name)
yield '%s %s' % (padding, x.line)
tbdepth -= 1
yield '%s<b><red>%s</red>: %s</b>' % (padding, type(exc).__name__, exc)
return indent + 1
|
[
"def",
"format_exception",
"(",
"exc",
",",
"indent",
"=",
"0",
",",
"pad",
"=",
"' '",
")",
":",
"from_msg",
"=",
"None",
"if",
"exc",
".",
"__cause__",
"is",
"not",
"None",
":",
"indent",
"+=",
"yield",
"from",
"format_exception",
"(",
"exc",
".",
"__cause__",
",",
"indent",
")",
"from_msg",
"=",
"traceback",
".",
"_cause_message",
".",
"strip",
"(",
")",
"elif",
"exc",
".",
"__context__",
"is",
"not",
"None",
"and",
"not",
"exc",
".",
"__suppress_context__",
":",
"indent",
"+=",
"yield",
"from",
"format_exception",
"(",
"exc",
".",
"__context__",
",",
"indent",
")",
"from_msg",
"=",
"traceback",
".",
"_context_message",
".",
"strip",
"(",
")",
"padding",
"=",
"pad",
"*",
"indent",
"if",
"from_msg",
":",
"yield",
"'\\n%s%s\\n'",
"%",
"(",
"padding",
",",
"from_msg",
")",
"yield",
"'%s<b><u>Traceback (most recent call last)</u></b>'",
"%",
"padding",
"tblist",
"=",
"traceback",
".",
"extract_tb",
"(",
"exc",
".",
"__traceback__",
")",
"tbdepth",
"=",
"len",
"(",
"tblist",
")",
"for",
"x",
"in",
"tblist",
":",
"depth",
"=",
"'%d.'",
"%",
"tbdepth",
"yield",
"'%s<dim>%-3s</dim> <cyan>File</cyan> \"<blue>%s</blue>\", '",
"'line <u>%d</u>, in <b>%s</b>'",
"%",
"(",
"padding",
",",
"depth",
",",
"x",
".",
"filename",
",",
"x",
".",
"lineno",
",",
"x",
".",
"name",
")",
"yield",
"'%s %s'",
"%",
"(",
"padding",
",",
"x",
".",
"line",
")",
"tbdepth",
"-=",
"1",
"yield",
"'%s<b><red>%s</red>: %s</b>'",
"%",
"(",
"padding",
",",
"type",
"(",
"exc",
")",
".",
"__name__",
",",
"exc",
")",
"return",
"indent",
"+",
"1"
] |
Take an exception object and return a generator with vtml formatted
exception traceback lines.
|
[
"Take",
"an",
"exception",
"object",
"and",
"return",
"a",
"generator",
"with",
"vtml",
"formatted",
"exception",
"traceback",
"lines",
"."
] |
df0f0e4612d138c34d8cb99b66ab5b8e47f1414a
|
https://github.com/mayfield/shellish/blob/df0f0e4612d138c34d8cb99b66ab5b8e47f1414a/shellish/rendering/traceback.py#L12-L36
|
241,650
|
mayfield/shellish
|
shellish/rendering/traceback.py
|
print_exception
|
def print_exception(*args, file=None, **kwargs):
""" Print the formatted output of an exception object. """
for line in format_exception(*args, **kwargs):
vtml.vtmlprint(line, file=file)
|
python
|
def print_exception(*args, file=None, **kwargs):
""" Print the formatted output of an exception object. """
for line in format_exception(*args, **kwargs):
vtml.vtmlprint(line, file=file)
|
[
"def",
"print_exception",
"(",
"*",
"args",
",",
"file",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"line",
"in",
"format_exception",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"vtml",
".",
"vtmlprint",
"(",
"line",
",",
"file",
"=",
"file",
")"
] |
Print the formatted output of an exception object.
|
[
"Print",
"the",
"formatted",
"output",
"of",
"an",
"exception",
"object",
"."
] |
df0f0e4612d138c34d8cb99b66ab5b8e47f1414a
|
https://github.com/mayfield/shellish/blob/df0f0e4612d138c34d8cb99b66ab5b8e47f1414a/shellish/rendering/traceback.py#L39-L42
|
241,651
|
1and1/infrascope
|
src/infrascope/configuration.py
|
Configuration.create
|
def create(cls, config_file=None):
""" Return the default configuration.
"""
if cls.instance is None:
cls.instance = cls(config_file)
# Load config file, possibly overwriting the defaults
cls.instance.load_ini()
if config_file and config_file != cls.instance.config_file:
raise RuntimeError("Configuration initialized a second time with a different file!")
return cls.instance
|
python
|
def create(cls, config_file=None):
""" Return the default configuration.
"""
if cls.instance is None:
cls.instance = cls(config_file)
# Load config file, possibly overwriting the defaults
cls.instance.load_ini()
if config_file and config_file != cls.instance.config_file:
raise RuntimeError("Configuration initialized a second time with a different file!")
return cls.instance
|
[
"def",
"create",
"(",
"cls",
",",
"config_file",
"=",
"None",
")",
":",
"if",
"cls",
".",
"instance",
"is",
"None",
":",
"cls",
".",
"instance",
"=",
"cls",
"(",
"config_file",
")",
"# Load config file, possibly overwriting the defaults",
"cls",
".",
"instance",
".",
"load_ini",
"(",
")",
"if",
"config_file",
"and",
"config_file",
"!=",
"cls",
".",
"instance",
".",
"config_file",
":",
"raise",
"RuntimeError",
"(",
"\"Configuration initialized a second time with a different file!\"",
")",
"return",
"cls",
".",
"instance"
] |
Return the default configuration.
|
[
"Return",
"the",
"default",
"configuration",
"."
] |
d7e291917e618a0a3cd6d5bfc20c6b5defd7550c
|
https://github.com/1and1/infrascope/blob/d7e291917e618a0a3cd6d5bfc20c6b5defd7550c/src/infrascope/configuration.py#L38-L50
|
241,652
|
1and1/infrascope
|
src/infrascope/configuration.py
|
Configuration.load_ini
|
def load_ini(self):
""" Load the given .INI file.
"""
if not self.config_file:
return
# Load INI file
ini_file = ConfigParser.SafeConfigParser()
if not ini_file.read(self.config_file):
raise ConfigParser.ParsingError("Global configuration file %r not found!" % (
self.config_file,
))
"""
# Make sure there's our global settings section
if not ini_file.has_section(self.SECTION):
raise ConfigParser.ParsingError("%r needs to have a [%s] section!" % (
self.config_file, self.SECTION,
))
# Get the given values
for key, val in ini_file.items(self.SECTION):
# Ensure that all names are known (to prevent uncaught typos)
if key not in self.KEYS:
raise ConfigParser.ParsingError("%r has an unknown key %s in the [%s] section!" % (
self.config_file, key, self.SECTION,
))
# Do some shell-like path expansion
val = os.path.expanduser(os.path.expandvars(val))
# Set as attribute for easy access
setattr(self, key, val)
"""
self._validate()
|
python
|
def load_ini(self):
""" Load the given .INI file.
"""
if not self.config_file:
return
# Load INI file
ini_file = ConfigParser.SafeConfigParser()
if not ini_file.read(self.config_file):
raise ConfigParser.ParsingError("Global configuration file %r not found!" % (
self.config_file,
))
"""
# Make sure there's our global settings section
if not ini_file.has_section(self.SECTION):
raise ConfigParser.ParsingError("%r needs to have a [%s] section!" % (
self.config_file, self.SECTION,
))
# Get the given values
for key, val in ini_file.items(self.SECTION):
# Ensure that all names are known (to prevent uncaught typos)
if key not in self.KEYS:
raise ConfigParser.ParsingError("%r has an unknown key %s in the [%s] section!" % (
self.config_file, key, self.SECTION,
))
# Do some shell-like path expansion
val = os.path.expanduser(os.path.expandvars(val))
# Set as attribute for easy access
setattr(self, key, val)
"""
self._validate()
|
[
"def",
"load_ini",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"config_file",
":",
"return",
"# Load INI file",
"ini_file",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"if",
"not",
"ini_file",
".",
"read",
"(",
"self",
".",
"config_file",
")",
":",
"raise",
"ConfigParser",
".",
"ParsingError",
"(",
"\"Global configuration file %r not found!\"",
"%",
"(",
"self",
".",
"config_file",
",",
")",
")",
"\"\"\"\n # Make sure there's our global settings section\n if not ini_file.has_section(self.SECTION):\n raise ConfigParser.ParsingError(\"%r needs to have a [%s] section!\" % (\n self.config_file, self.SECTION,\n ))\n\n # Get the given values\n for key, val in ini_file.items(self.SECTION):\n # Ensure that all names are known (to prevent uncaught typos)\n if key not in self.KEYS:\n raise ConfigParser.ParsingError(\"%r has an unknown key %s in the [%s] section!\" % (\n self.config_file, key, self.SECTION,\n ))\n\n # Do some shell-like path expansion\n val = os.path.expanduser(os.path.expandvars(val))\n\n # Set as attribute for easy access\n setattr(self, key, val)\n \"\"\"",
"self",
".",
"_validate",
"(",
")"
] |
Load the given .INI file.
|
[
"Load",
"the",
"given",
".",
"INI",
"file",
"."
] |
d7e291917e618a0a3cd6d5bfc20c6b5defd7550c
|
https://github.com/1and1/infrascope/blob/d7e291917e618a0a3cd6d5bfc20c6b5defd7550c/src/infrascope/configuration.py#L74-L109
|
241,653
|
logston/py3s3
|
py3s3/files.py
|
File.multiple_chunks
|
def multiple_chunks(self, chunk_size=None):
"""
Returns ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
return self.size > chunk_size
|
python
|
def multiple_chunks(self, chunk_size=None):
"""
Returns ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
"""
if not chunk_size:
chunk_size = self.DEFAULT_CHUNK_SIZE
return self.size > chunk_size
|
[
"def",
"multiple_chunks",
"(",
"self",
",",
"chunk_size",
"=",
"None",
")",
":",
"if",
"not",
"chunk_size",
":",
"chunk_size",
"=",
"self",
".",
"DEFAULT_CHUNK_SIZE",
"return",
"self",
".",
"size",
">",
"chunk_size"
] |
Returns ``True`` if you can expect multiple chunks.
NB: If a particular file representation is in memory, subclasses should
always return ``False`` -- there's no good reason to read from memory in
chunks.
|
[
"Returns",
"True",
"if",
"you",
"can",
"expect",
"multiple",
"chunks",
"."
] |
1910ca60c53a53d839d6f7b09c05b555f3bfccf4
|
https://github.com/logston/py3s3/blob/1910ca60c53a53d839d6f7b09c05b555f3bfccf4/py3s3/files.py#L113-L123
|
241,654
|
logston/py3s3
|
py3s3/files.py
|
S3ContentFile.content
|
def content(self, value):
"""
Set content to byte string, encoding if necessary
"""
if isinstance(value, bytes):
self._content = value
else:
self._content = value.encode(ENCODING)
self.size = len(value)
|
python
|
def content(self, value):
"""
Set content to byte string, encoding if necessary
"""
if isinstance(value, bytes):
self._content = value
else:
self._content = value.encode(ENCODING)
self.size = len(value)
|
[
"def",
"content",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"self",
".",
"_content",
"=",
"value",
"else",
":",
"self",
".",
"_content",
"=",
"value",
".",
"encode",
"(",
"ENCODING",
")",
"self",
".",
"size",
"=",
"len",
"(",
"value",
")"
] |
Set content to byte string, encoding if necessary
|
[
"Set",
"content",
"to",
"byte",
"string",
"encoding",
"if",
"necessary"
] |
1910ca60c53a53d839d6f7b09c05b555f3bfccf4
|
https://github.com/logston/py3s3/blob/1910ca60c53a53d839d6f7b09c05b555f3bfccf4/py3s3/files.py#L208-L216
|
241,655
|
logston/py3s3
|
py3s3/files.py
|
S3ContentFile.md5hash
|
def md5hash(self):
"""Return the MD5 hash string of the file content"""
digest = hashlib.md5(self.content).digest()
return b64_string(digest)
|
python
|
def md5hash(self):
"""Return the MD5 hash string of the file content"""
digest = hashlib.md5(self.content).digest()
return b64_string(digest)
|
[
"def",
"md5hash",
"(",
"self",
")",
":",
"digest",
"=",
"hashlib",
".",
"md5",
"(",
"self",
".",
"content",
")",
".",
"digest",
"(",
")",
"return",
"b64_string",
"(",
"digest",
")"
] |
Return the MD5 hash string of the file content
|
[
"Return",
"the",
"MD5",
"hash",
"string",
"of",
"the",
"file",
"content"
] |
1910ca60c53a53d839d6f7b09c05b555f3bfccf4
|
https://github.com/logston/py3s3/blob/1910ca60c53a53d839d6f7b09c05b555f3bfccf4/py3s3/files.py#L218-L221
|
241,656
|
logston/py3s3
|
py3s3/files.py
|
S3ContentFile.read
|
def read(self, chunk_size=None):
"""
Return chunk_size of bytes, starting from self.pos, from self.content.
"""
if chunk_size:
data = self.content[self.pos:self.pos + chunk_size]
self.pos += len(data)
return data
else:
return self.content
|
python
|
def read(self, chunk_size=None):
"""
Return chunk_size of bytes, starting from self.pos, from self.content.
"""
if chunk_size:
data = self.content[self.pos:self.pos + chunk_size]
self.pos += len(data)
return data
else:
return self.content
|
[
"def",
"read",
"(",
"self",
",",
"chunk_size",
"=",
"None",
")",
":",
"if",
"chunk_size",
":",
"data",
"=",
"self",
".",
"content",
"[",
"self",
".",
"pos",
":",
"self",
".",
"pos",
"+",
"chunk_size",
"]",
"self",
".",
"pos",
"+=",
"len",
"(",
"data",
")",
"return",
"data",
"else",
":",
"return",
"self",
".",
"content"
] |
Return chunk_size of bytes, starting from self.pos, from self.content.
|
[
"Return",
"chunk_size",
"of",
"bytes",
"starting",
"from",
"self",
".",
"pos",
"from",
"self",
".",
"content",
"."
] |
1910ca60c53a53d839d6f7b09c05b555f3bfccf4
|
https://github.com/logston/py3s3/blob/1910ca60c53a53d839d6f7b09c05b555f3bfccf4/py3s3/files.py#L226-L235
|
241,657
|
klmitch/appathy
|
appathy/utils.py
|
import_egg
|
def import_egg(string):
"""
Import a controller class from an egg. Uses the entry point group
"appathy.controller".
"""
# Split the string into a distribution and a name
dist, _sep, name = string.partition('#')
return pkg_resources.load_entry_point(dist, 'appathy.controller', name)
|
python
|
def import_egg(string):
"""
Import a controller class from an egg. Uses the entry point group
"appathy.controller".
"""
# Split the string into a distribution and a name
dist, _sep, name = string.partition('#')
return pkg_resources.load_entry_point(dist, 'appathy.controller', name)
|
[
"def",
"import_egg",
"(",
"string",
")",
":",
"# Split the string into a distribution and a name",
"dist",
",",
"_sep",
",",
"name",
"=",
"string",
".",
"partition",
"(",
"'#'",
")",
"return",
"pkg_resources",
".",
"load_entry_point",
"(",
"dist",
",",
"'appathy.controller'",
",",
"name",
")"
] |
Import a controller class from an egg. Uses the entry point group
"appathy.controller".
|
[
"Import",
"a",
"controller",
"class",
"from",
"an",
"egg",
".",
"Uses",
"the",
"entry",
"point",
"group",
"appathy",
".",
"controller",
"."
] |
a10aa7d21d38622e984a8fe106ab37114af90dc2
|
https://github.com/klmitch/appathy/blob/a10aa7d21d38622e984a8fe106ab37114af90dc2/appathy/utils.py#L57-L66
|
241,658
|
jalanb/pysyte
|
pysyte/iteration.py
|
first
|
def first(sequence, message=None):
"""The first item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return next(iter(sequence))
except StopIteration:
raise ValueError(message or ('Sequence is empty: %s' % sequence))
|
python
|
def first(sequence, message=None):
"""The first item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return next(iter(sequence))
except StopIteration:
raise ValueError(message or ('Sequence is empty: %s' % sequence))
|
[
"def",
"first",
"(",
"sequence",
",",
"message",
"=",
"None",
")",
":",
"try",
":",
"return",
"next",
"(",
"iter",
"(",
"sequence",
")",
")",
"except",
"StopIteration",
":",
"raise",
"ValueError",
"(",
"message",
"or",
"(",
"'Sequence is empty: %s'",
"%",
"sequence",
")",
")"
] |
The first item in that sequence
If there aren't any, raise a ValueError with that message
|
[
"The",
"first",
"item",
"in",
"that",
"sequence"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/iteration.py#L7-L15
|
241,659
|
jalanb/pysyte
|
pysyte/iteration.py
|
last
|
def last(sequence, message=None):
"""The last item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return sequence.pop()
except AttributeError:
return list(sequence).pop()
except IndexError:
raise ValueError(message or f'Sequence is empty: {sequence}')
|
python
|
def last(sequence, message=None):
"""The last item in that sequence
If there aren't any, raise a ValueError with that message
"""
try:
return sequence.pop()
except AttributeError:
return list(sequence).pop()
except IndexError:
raise ValueError(message or f'Sequence is empty: {sequence}')
|
[
"def",
"last",
"(",
"sequence",
",",
"message",
"=",
"None",
")",
":",
"try",
":",
"return",
"sequence",
".",
"pop",
"(",
")",
"except",
"AttributeError",
":",
"return",
"list",
"(",
"sequence",
")",
".",
"pop",
"(",
")",
"except",
"IndexError",
":",
"raise",
"ValueError",
"(",
"message",
"or",
"f'Sequence is empty: {sequence}'",
")"
] |
The last item in that sequence
If there aren't any, raise a ValueError with that message
|
[
"The",
"last",
"item",
"in",
"that",
"sequence"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/iteration.py#L18-L28
|
241,660
|
jalanb/pysyte
|
pysyte/iteration.py
|
first_that
|
def first_that(predicate, sequence, message=None):
"""The first item in that sequence that matches that predicate
If none matches raise a KeyError with that message
"""
try:
return next(ifilter(predicate, sequence))
except StopIteration:
raise KeyError(message or 'Not Found')
|
python
|
def first_that(predicate, sequence, message=None):
"""The first item in that sequence that matches that predicate
If none matches raise a KeyError with that message
"""
try:
return next(ifilter(predicate, sequence))
except StopIteration:
raise KeyError(message or 'Not Found')
|
[
"def",
"first_that",
"(",
"predicate",
",",
"sequence",
",",
"message",
"=",
"None",
")",
":",
"try",
":",
"return",
"next",
"(",
"ifilter",
"(",
"predicate",
",",
"sequence",
")",
")",
"except",
"StopIteration",
":",
"raise",
"KeyError",
"(",
"message",
"or",
"'Not Found'",
")"
] |
The first item in that sequence that matches that predicate
If none matches raise a KeyError with that message
|
[
"The",
"first",
"item",
"in",
"that",
"sequence",
"that",
"matches",
"that",
"predicate"
] |
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
|
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/iteration.py#L38-L46
|
241,661
|
MacHu-GWU/angora-project
|
angora/math/img2waveform.py
|
expand_window
|
def expand_window(center, window_size, array_size):
"""Generate a bounded windows.
maxlength = 2 * window_size + 1, lower bound is 0 and upper bound is
``array_size - 1``.
Example::
>>> expand_window(center=50, window_size=3, max=100)
[47, 48, 49, 50, 51, 52, 53]
>>> expand_window(center=2, window_size=3, max=100)
[0, 1, 2, 3, 4, 5]
>>> expand_window(center=98, window_size=3, max=100)
[95, 96, 97, 98, 99]
"""
if center - window_size < 0:
lower = 0
else:
lower = center - window_size
if center + window_size + 1 > array_size:
upper = array_size
else:
upper = center + window_size + 1
return np.array(range(lower, upper))
|
python
|
def expand_window(center, window_size, array_size):
"""Generate a bounded windows.
maxlength = 2 * window_size + 1, lower bound is 0 and upper bound is
``array_size - 1``.
Example::
>>> expand_window(center=50, window_size=3, max=100)
[47, 48, 49, 50, 51, 52, 53]
>>> expand_window(center=2, window_size=3, max=100)
[0, 1, 2, 3, 4, 5]
>>> expand_window(center=98, window_size=3, max=100)
[95, 96, 97, 98, 99]
"""
if center - window_size < 0:
lower = 0
else:
lower = center - window_size
if center + window_size + 1 > array_size:
upper = array_size
else:
upper = center + window_size + 1
return np.array(range(lower, upper))
|
[
"def",
"expand_window",
"(",
"center",
",",
"window_size",
",",
"array_size",
")",
":",
"if",
"center",
"-",
"window_size",
"<",
"0",
":",
"lower",
"=",
"0",
"else",
":",
"lower",
"=",
"center",
"-",
"window_size",
"if",
"center",
"+",
"window_size",
"+",
"1",
">",
"array_size",
":",
"upper",
"=",
"array_size",
"else",
":",
"upper",
"=",
"center",
"+",
"window_size",
"+",
"1",
"return",
"np",
".",
"array",
"(",
"range",
"(",
"lower",
",",
"upper",
")",
")"
] |
Generate a bounded windows.
maxlength = 2 * window_size + 1, lower bound is 0 and upper bound is
``array_size - 1``.
Example::
>>> expand_window(center=50, window_size=3, max=100)
[47, 48, 49, 50, 51, 52, 53]
>>> expand_window(center=2, window_size=3, max=100)
[0, 1, 2, 3, 4, 5]
>>> expand_window(center=98, window_size=3, max=100)
[95, 96, 97, 98, 99]
|
[
"Generate",
"a",
"bounded",
"windows",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/math/img2waveform.py#L44-L69
|
241,662
|
MacHu-GWU/angora-project
|
angora/math/img2waveform.py
|
img2ascii
|
def img2ascii(img_path, ascii_path, ascii_char="*", pad=0):
"""Convert an image to ascii art text.
Suppose we have an image like that:
.. image:: images/rabbit.png
:align: left
Put some codes::
>>> from weatherlab.math.img2waveform import img2ascii
>>> img2ascii(r"testdata\img2waveform\rabbit.png",
... r"testdata\img2waveform\asciiart.txt", pad=0)
Then you will see this in asciiart.txt::
******
*** *** ****
** ** *********
** ** *** ***
** * ** **
** ** ** **
** * *** *
* ** ** **
** * ** **
** * ** *
* ** ** *
** ** * **
** * ** **
* * ** **
* ** * **
** ** ** **
** * ** **
** * * **
** * ** *
** * ** *
* ** ** *
* ** * *
* ** ** *
* ** ** *
** ** ** **
** * ** **
** * * **
** * * **
** * * **
* * ** **
* * ** *
** * ** *
** * ** *
** ** ** **
* ** ** **
* ** ** **
** ** ** *
** ** ** **
* ** ** **
** ** ** *
** ******* *
** ******* **
** **
** *
** **
*** *
**** ***
*** ***
** ****
** ***
** ***
** **
** **
* **
** **
** **
** **
** **
** **
** **
** **
* **
* **
** *
** **
* **
* **
** *
** *
** **
** **
** **
** **
** ** **
** *** *** **
* **** **** **
* *** **** **
** ** ** *
** *
** *
* **
** **
** *
* **
** **
** **
** *
** **
** **
** **
** *** ** **
** ****** ***
*** ****** **
*** * *** ***
*** ***
*** ***
**** ****
******** *******
*** ********** ******** ***
** *** ************ ********** *** * ***
** * **** *********************** *** ** ***
** * ** **** ** ******* * *** ***** ***
**** * * ***** ********** * **** * * ** **
*** * * ** * ******************************* * *** * **
** ***** * *** ********** ** ** ********** *** ** ***
** * ***** ** * ***** ** ** ***** * * ** * **
*** *** ************ ** ****** ** * * ** ** ** * ** ***
** ******* * * ** ** ** **** * ** * ** * **** **
** *** *** ******* ****** * ** * *** ***** *** ** ***** ** **
** * * ***** ************************************ * **** * **
*** ** ** *********************************************** *** ***
*** ** ****************************************** **** ** ** **
**** ** ** ******************************************** ** * **
** ****** ** ******************************************** ** * ***
** ***** *********************************************** ** ****
* *** ****************************** **************** *********
** ** *************************************** * * * ***** *
** ** ********************************************** *** *
* ** ** *********************************** ******* ** *
** ** ***************************************** *** ** *
*** ** * ********************************************** ** **
****** ************************************************ ** ***
**** *********************************************** ********
** *********************************************** ****
*** ** ******************************************* **
*** ** ***** ****** * * * * * ******** *** ** ** ***
*** * * **** **** **** * ** ** * *** ** ***
**** * * ** **** * *** ******** * *** *****
***** ** ** ** ** *** ** *** *****
******* * * ** * ** ********
*************** * *******************
****************************** ***
*** ********* **
** * **
** * **
** * **
** * **
** * **
** ** **
** ****** * ** *********
*************************************
**********
:param img_path: the image file path
:type img_path: str
:param ascii_path: the output ascii text file path
:type ascii_path: str
:param pad: how many space been filled in between two pixels
:type pad: int
"""
if len(ascii_char) != 1:
raise Exception("ascii_char has to be single character.")
image = Image.open(img_path).convert("L")
matrix = np.array(image)
# you can customize the gray scale fix behavior to fit color image
matrix[np.where(matrix >= 128)] = 255
matrix[np.where(matrix < 128)] = 0
lines = list()
for vector in matrix:
line = list()
for i in vector:
line.append(" " * pad)
if i:
line.append(" ")
else:
line.append(ascii_char)
lines.append("".join(line))
with open(ascii_path, "w") as f:
f.write("\n".join(lines))
|
python
|
def img2ascii(img_path, ascii_path, ascii_char="*", pad=0):
"""Convert an image to ascii art text.
Suppose we have an image like that:
.. image:: images/rabbit.png
:align: left
Put some codes::
>>> from weatherlab.math.img2waveform import img2ascii
>>> img2ascii(r"testdata\img2waveform\rabbit.png",
... r"testdata\img2waveform\asciiart.txt", pad=0)
Then you will see this in asciiart.txt::
******
*** *** ****
** ** *********
** ** *** ***
** * ** **
** ** ** **
** * *** *
* ** ** **
** * ** **
** * ** *
* ** ** *
** ** * **
** * ** **
* * ** **
* ** * **
** ** ** **
** * ** **
** * * **
** * ** *
** * ** *
* ** ** *
* ** * *
* ** ** *
* ** ** *
** ** ** **
** * ** **
** * * **
** * * **
** * * **
* * ** **
* * ** *
** * ** *
** * ** *
** ** ** **
* ** ** **
* ** ** **
** ** ** *
** ** ** **
* ** ** **
** ** ** *
** ******* *
** ******* **
** **
** *
** **
*** *
**** ***
*** ***
** ****
** ***
** ***
** **
** **
* **
** **
** **
** **
** **
** **
** **
** **
* **
* **
** *
** **
* **
* **
** *
** *
** **
** **
** **
** **
** ** **
** *** *** **
* **** **** **
* *** **** **
** ** ** *
** *
** *
* **
** **
** *
* **
** **
** **
** *
** **
** **
** **
** *** ** **
** ****** ***
*** ****** **
*** * *** ***
*** ***
*** ***
**** ****
******** *******
*** ********** ******** ***
** *** ************ ********** *** * ***
** * **** *********************** *** ** ***
** * ** **** ** ******* * *** ***** ***
**** * * ***** ********** * **** * * ** **
*** * * ** * ******************************* * *** * **
** ***** * *** ********** ** ** ********** *** ** ***
** * ***** ** * ***** ** ** ***** * * ** * **
*** *** ************ ** ****** ** * * ** ** ** * ** ***
** ******* * * ** ** ** **** * ** * ** * **** **
** *** *** ******* ****** * ** * *** ***** *** ** ***** ** **
** * * ***** ************************************ * **** * **
*** ** ** *********************************************** *** ***
*** ** ****************************************** **** ** ** **
**** ** ** ******************************************** ** * **
** ****** ** ******************************************** ** * ***
** ***** *********************************************** ** ****
* *** ****************************** **************** *********
** ** *************************************** * * * ***** *
** ** ********************************************** *** *
* ** ** *********************************** ******* ** *
** ** ***************************************** *** ** *
*** ** * ********************************************** ** **
****** ************************************************ ** ***
**** *********************************************** ********
** *********************************************** ****
*** ** ******************************************* **
*** ** ***** ****** * * * * * ******** *** ** ** ***
*** * * **** **** **** * ** ** * *** ** ***
**** * * ** **** * *** ******** * *** *****
***** ** ** ** ** *** ** *** *****
******* * * ** * ** ********
*************** * *******************
****************************** ***
*** ********* **
** * **
** * **
** * **
** * **
** * **
** ** **
** ****** * ** *********
*************************************
**********
:param img_path: the image file path
:type img_path: str
:param ascii_path: the output ascii text file path
:type ascii_path: str
:param pad: how many space been filled in between two pixels
:type pad: int
"""
if len(ascii_char) != 1:
raise Exception("ascii_char has to be single character.")
image = Image.open(img_path).convert("L")
matrix = np.array(image)
# you can customize the gray scale fix behavior to fit color image
matrix[np.where(matrix >= 128)] = 255
matrix[np.where(matrix < 128)] = 0
lines = list()
for vector in matrix:
line = list()
for i in vector:
line.append(" " * pad)
if i:
line.append(" ")
else:
line.append(ascii_char)
lines.append("".join(line))
with open(ascii_path, "w") as f:
f.write("\n".join(lines))
|
[
"def",
"img2ascii",
"(",
"img_path",
",",
"ascii_path",
",",
"ascii_char",
"=",
"\"*\"",
",",
"pad",
"=",
"0",
")",
":",
"if",
"len",
"(",
"ascii_char",
")",
"!=",
"1",
":",
"raise",
"Exception",
"(",
"\"ascii_char has to be single character.\"",
")",
"image",
"=",
"Image",
".",
"open",
"(",
"img_path",
")",
".",
"convert",
"(",
"\"L\"",
")",
"matrix",
"=",
"np",
".",
"array",
"(",
"image",
")",
"# you can customize the gray scale fix behavior to fit color image",
"matrix",
"[",
"np",
".",
"where",
"(",
"matrix",
">=",
"128",
")",
"]",
"=",
"255",
"matrix",
"[",
"np",
".",
"where",
"(",
"matrix",
"<",
"128",
")",
"]",
"=",
"0",
"lines",
"=",
"list",
"(",
")",
"for",
"vector",
"in",
"matrix",
":",
"line",
"=",
"list",
"(",
")",
"for",
"i",
"in",
"vector",
":",
"line",
".",
"append",
"(",
"\" \"",
"*",
"pad",
")",
"if",
"i",
":",
"line",
".",
"append",
"(",
"\" \"",
")",
"else",
":",
"line",
".",
"append",
"(",
"ascii_char",
")",
"lines",
".",
"append",
"(",
"\"\"",
".",
"join",
"(",
"line",
")",
")",
"with",
"open",
"(",
"ascii_path",
",",
"\"w\"",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"\"\\n\"",
".",
"join",
"(",
"lines",
")",
")"
] |
Convert an image to ascii art text.
Suppose we have an image like that:
.. image:: images/rabbit.png
:align: left
Put some codes::
>>> from weatherlab.math.img2waveform import img2ascii
>>> img2ascii(r"testdata\img2waveform\rabbit.png",
... r"testdata\img2waveform\asciiart.txt", pad=0)
Then you will see this in asciiart.txt::
******
*** *** ****
** ** *********
** ** *** ***
** * ** **
** ** ** **
** * *** *
* ** ** **
** * ** **
** * ** *
* ** ** *
** ** * **
** * ** **
* * ** **
* ** * **
** ** ** **
** * ** **
** * * **
** * ** *
** * ** *
* ** ** *
* ** * *
* ** ** *
* ** ** *
** ** ** **
** * ** **
** * * **
** * * **
** * * **
* * ** **
* * ** *
** * ** *
** * ** *
** ** ** **
* ** ** **
* ** ** **
** ** ** *
** ** ** **
* ** ** **
** ** ** *
** ******* *
** ******* **
** **
** *
** **
*** *
**** ***
*** ***
** ****
** ***
** ***
** **
** **
* **
** **
** **
** **
** **
** **
** **
** **
* **
* **
** *
** **
* **
* **
** *
** *
** **
** **
** **
** **
** ** **
** *** *** **
* **** **** **
* *** **** **
** ** ** *
** *
** *
* **
** **
** *
* **
** **
** **
** *
** **
** **
** **
** *** ** **
** ****** ***
*** ****** **
*** * *** ***
*** ***
*** ***
**** ****
******** *******
*** ********** ******** ***
** *** ************ ********** *** * ***
** * **** *********************** *** ** ***
** * ** **** ** ******* * *** ***** ***
**** * * ***** ********** * **** * * ** **
*** * * ** * ******************************* * *** * **
** ***** * *** ********** ** ** ********** *** ** ***
** * ***** ** * ***** ** ** ***** * * ** * **
*** *** ************ ** ****** ** * * ** ** ** * ** ***
** ******* * * ** ** ** **** * ** * ** * **** **
** *** *** ******* ****** * ** * *** ***** *** ** ***** ** **
** * * ***** ************************************ * **** * **
*** ** ** *********************************************** *** ***
*** ** ****************************************** **** ** ** **
**** ** ** ******************************************** ** * **
** ****** ** ******************************************** ** * ***
** ***** *********************************************** ** ****
* *** ****************************** **************** *********
** ** *************************************** * * * ***** *
** ** ********************************************** *** *
* ** ** *********************************** ******* ** *
** ** ***************************************** *** ** *
*** ** * ********************************************** ** **
****** ************************************************ ** ***
**** *********************************************** ********
** *********************************************** ****
*** ** ******************************************* **
*** ** ***** ****** * * * * * ******** *** ** ** ***
*** * * **** **** **** * ** ** * *** ** ***
**** * * ** **** * *** ******** * *** *****
***** ** ** ** ** *** ** *** *****
******* * * ** * ** ********
*************** * *******************
****************************** ***
*** ********* **
** * **
** * **
** * **
** * **
** * **
** ** **
** ****** * ** *********
*************************************
**********
:param img_path: the image file path
:type img_path: str
:param ascii_path: the output ascii text file path
:type ascii_path: str
:param pad: how many space been filled in between two pixels
:type pad: int
|
[
"Convert",
"an",
"image",
"to",
"ascii",
"art",
"text",
"."
] |
689a60da51cd88680ddbe26e28dbe81e6b01d275
|
https://github.com/MacHu-GWU/angora-project/blob/689a60da51cd88680ddbe26e28dbe81e6b01d275/angora/math/img2waveform.py#L72-L261
|
241,663
|
Fuyukai/ConfigMaster
|
configmaster/ConfigFile.py
|
ConfigObject.initial_populate
|
def initial_populate(self, data):
"""
Populate a newly created config object with data.
If it was populated, this returns True. If it wasn't, this returns False.
It is recommended to run a .dump() and .reload() after running this.
"""
if self.config.parsed:
return False
# Otherwise, create a new ConfigKey.
self.config.load_from_dict(data)
return True
|
python
|
def initial_populate(self, data):
"""
Populate a newly created config object with data.
If it was populated, this returns True. If it wasn't, this returns False.
It is recommended to run a .dump() and .reload() after running this.
"""
if self.config.parsed:
return False
# Otherwise, create a new ConfigKey.
self.config.load_from_dict(data)
return True
|
[
"def",
"initial_populate",
"(",
"self",
",",
"data",
")",
":",
"if",
"self",
".",
"config",
".",
"parsed",
":",
"return",
"False",
"# Otherwise, create a new ConfigKey.",
"self",
".",
"config",
".",
"load_from_dict",
"(",
"data",
")",
"return",
"True"
] |
Populate a newly created config object with data.
If it was populated, this returns True. If it wasn't, this returns False.
It is recommended to run a .dump() and .reload() after running this.
|
[
"Populate",
"a",
"newly",
"created",
"config",
"object",
"with",
"data",
"."
] |
8018aa415da55c84edaa8a49664f674758a14edd
|
https://github.com/Fuyukai/ConfigMaster/blob/8018aa415da55c84edaa8a49664f674758a14edd/configmaster/ConfigFile.py#L67-L79
|
241,664
|
Fuyukai/ConfigMaster
|
configmaster/ConfigFile.py
|
ConfigObject.apply_defaults
|
def apply_defaults(self, other_config):
"""
Applies default values from a different ConfigObject or ConfigKey object to this ConfigObject.
If there are any values in this object that are also in the default object, it will use the values from this object.
"""
if isinstance(other_config, self.__class__):
self.config.load_from_dict(other_config.config, overwrite=False)
else:
self.config.load_from_dict(other_config, overwrite=False)
|
python
|
def apply_defaults(self, other_config):
"""
Applies default values from a different ConfigObject or ConfigKey object to this ConfigObject.
If there are any values in this object that are also in the default object, it will use the values from this object.
"""
if isinstance(other_config, self.__class__):
self.config.load_from_dict(other_config.config, overwrite=False)
else:
self.config.load_from_dict(other_config, overwrite=False)
|
[
"def",
"apply_defaults",
"(",
"self",
",",
"other_config",
")",
":",
"if",
"isinstance",
"(",
"other_config",
",",
"self",
".",
"__class__",
")",
":",
"self",
".",
"config",
".",
"load_from_dict",
"(",
"other_config",
".",
"config",
",",
"overwrite",
"=",
"False",
")",
"else",
":",
"self",
".",
"config",
".",
"load_from_dict",
"(",
"other_config",
",",
"overwrite",
"=",
"False",
")"
] |
Applies default values from a different ConfigObject or ConfigKey object to this ConfigObject.
If there are any values in this object that are also in the default object, it will use the values from this object.
|
[
"Applies",
"default",
"values",
"from",
"a",
"different",
"ConfigObject",
"or",
"ConfigKey",
"object",
"to",
"this",
"ConfigObject",
"."
] |
8018aa415da55c84edaa8a49664f674758a14edd
|
https://github.com/Fuyukai/ConfigMaster/blob/8018aa415da55c84edaa8a49664f674758a14edd/configmaster/ConfigFile.py#L81-L90
|
241,665
|
Fuyukai/ConfigMaster
|
configmaster/ConfigFile.py
|
ConfigFile.reload
|
def reload(self):
"""
Automatically reloads the config file.
This is just an alias for self.load()."""
if not self.fd.closed: self.fd.close()
self.fd = open(self.fd.name, 'r')
self.load()
|
python
|
def reload(self):
"""
Automatically reloads the config file.
This is just an alias for self.load()."""
if not self.fd.closed: self.fd.close()
self.fd = open(self.fd.name, 'r')
self.load()
|
[
"def",
"reload",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"fd",
".",
"closed",
":",
"self",
".",
"fd",
".",
"close",
"(",
")",
"self",
".",
"fd",
"=",
"open",
"(",
"self",
".",
"fd",
".",
"name",
",",
"'r'",
")",
"self",
".",
"load",
"(",
")"
] |
Automatically reloads the config file.
This is just an alias for self.load().
|
[
"Automatically",
"reloads",
"the",
"config",
"file",
"."
] |
8018aa415da55c84edaa8a49664f674758a14edd
|
https://github.com/Fuyukai/ConfigMaster/blob/8018aa415da55c84edaa8a49664f674758a14edd/configmaster/ConfigFile.py#L155-L164
|
241,666
|
Fuyukai/ConfigMaster
|
configmaster/ConfigFile.py
|
NetworkedConfigObject.save_to_file
|
def save_to_file(self, filename: str) -> ConfigFile:
"""
This converts the NetworkedConfigFile into a normal ConfigFile object.
This requires the normal class hooks to be provided.
"""
newclass = ConfigFile(fd=filename, load_hook=self.normal_class_hook[0],
dump_hook=self.normal_class_hook[1], safe_load=self.safe_load)
return newclass
|
python
|
def save_to_file(self, filename: str) -> ConfigFile:
"""
This converts the NetworkedConfigFile into a normal ConfigFile object.
This requires the normal class hooks to be provided.
"""
newclass = ConfigFile(fd=filename, load_hook=self.normal_class_hook[0],
dump_hook=self.normal_class_hook[1], safe_load=self.safe_load)
return newclass
|
[
"def",
"save_to_file",
"(",
"self",
",",
"filename",
":",
"str",
")",
"->",
"ConfigFile",
":",
"newclass",
"=",
"ConfigFile",
"(",
"fd",
"=",
"filename",
",",
"load_hook",
"=",
"self",
".",
"normal_class_hook",
"[",
"0",
"]",
",",
"dump_hook",
"=",
"self",
".",
"normal_class_hook",
"[",
"1",
"]",
",",
"safe_load",
"=",
"self",
".",
"safe_load",
")",
"return",
"newclass"
] |
This converts the NetworkedConfigFile into a normal ConfigFile object.
This requires the normal class hooks to be provided.
|
[
"This",
"converts",
"the",
"NetworkedConfigFile",
"into",
"a",
"normal",
"ConfigFile",
"object",
"."
] |
8018aa415da55c84edaa8a49664f674758a14edd
|
https://github.com/Fuyukai/ConfigMaster/blob/8018aa415da55c84edaa8a49664f674758a14edd/configmaster/ConfigFile.py#L203-L211
|
241,667
|
robertchase/ergaleia
|
ergaleia/to_args.py
|
to_args
|
def to_args(s):
""" parse a string into args and kwargs
the input is a blank-delimited set of tokens, which may be grouped
as strings (tick or double tick delimited) with embedded blanks.
a non-string equal (=) acts as a delimiter between key-value pairs.
the initial tokens are treated as args, followed by key-value pairs.
Example:
one 'two three' four=5 six='seven eight'
parses to:
args = ['one', 'two three']
kwargs = {'four': 5, 'six': 'seven eight'}
Return:
args as list
kwargs as dict
Notes:
1. Does not enforce args and keywords as valid python.
2. String delimiters can be escaped (\) within strings.
3. Key-value delimiters (=) can be surrounded by blanks.
4. Non-string integer kwarg values will be int; all other
values are str.
5. Designed for functionality, not speed
"""
args = []
kwargs = {}
state = 'arg'
for token in to_tokens(s):
if state == 'arg':
if token.is_key:
key = token.value
state = 'value'
else:
args.append(token.value)
elif state == 'key':
if not token.is_key:
raise ExpectingKey(token.value)
key = token.value
if key in kwargs:
raise DuplicateKey(key)
state = 'value'
elif state == 'value':
if token.is_key:
raise ConsecutiveKeys(token.value)
kwargs[key] = token.value
state = 'key'
if state == 'value':
raise IncompleteKeyValue(key)
return args, kwargs
|
python
|
def to_args(s):
""" parse a string into args and kwargs
the input is a blank-delimited set of tokens, which may be grouped
as strings (tick or double tick delimited) with embedded blanks.
a non-string equal (=) acts as a delimiter between key-value pairs.
the initial tokens are treated as args, followed by key-value pairs.
Example:
one 'two three' four=5 six='seven eight'
parses to:
args = ['one', 'two three']
kwargs = {'four': 5, 'six': 'seven eight'}
Return:
args as list
kwargs as dict
Notes:
1. Does not enforce args and keywords as valid python.
2. String delimiters can be escaped (\) within strings.
3. Key-value delimiters (=) can be surrounded by blanks.
4. Non-string integer kwarg values will be int; all other
values are str.
5. Designed for functionality, not speed
"""
args = []
kwargs = {}
state = 'arg'
for token in to_tokens(s):
if state == 'arg':
if token.is_key:
key = token.value
state = 'value'
else:
args.append(token.value)
elif state == 'key':
if not token.is_key:
raise ExpectingKey(token.value)
key = token.value
if key in kwargs:
raise DuplicateKey(key)
state = 'value'
elif state == 'value':
if token.is_key:
raise ConsecutiveKeys(token.value)
kwargs[key] = token.value
state = 'key'
if state == 'value':
raise IncompleteKeyValue(key)
return args, kwargs
|
[
"def",
"to_args",
"(",
"s",
")",
":",
"args",
"=",
"[",
"]",
"kwargs",
"=",
"{",
"}",
"state",
"=",
"'arg'",
"for",
"token",
"in",
"to_tokens",
"(",
"s",
")",
":",
"if",
"state",
"==",
"'arg'",
":",
"if",
"token",
".",
"is_key",
":",
"key",
"=",
"token",
".",
"value",
"state",
"=",
"'value'",
"else",
":",
"args",
".",
"append",
"(",
"token",
".",
"value",
")",
"elif",
"state",
"==",
"'key'",
":",
"if",
"not",
"token",
".",
"is_key",
":",
"raise",
"ExpectingKey",
"(",
"token",
".",
"value",
")",
"key",
"=",
"token",
".",
"value",
"if",
"key",
"in",
"kwargs",
":",
"raise",
"DuplicateKey",
"(",
"key",
")",
"state",
"=",
"'value'",
"elif",
"state",
"==",
"'value'",
":",
"if",
"token",
".",
"is_key",
":",
"raise",
"ConsecutiveKeys",
"(",
"token",
".",
"value",
")",
"kwargs",
"[",
"key",
"]",
"=",
"token",
".",
"value",
"state",
"=",
"'key'",
"if",
"state",
"==",
"'value'",
":",
"raise",
"IncompleteKeyValue",
"(",
"key",
")",
"return",
"args",
",",
"kwargs"
] |
parse a string into args and kwargs
the input is a blank-delimited set of tokens, which may be grouped
as strings (tick or double tick delimited) with embedded blanks.
a non-string equal (=) acts as a delimiter between key-value pairs.
the initial tokens are treated as args, followed by key-value pairs.
Example:
one 'two three' four=5 six='seven eight'
parses to:
args = ['one', 'two three']
kwargs = {'four': 5, 'six': 'seven eight'}
Return:
args as list
kwargs as dict
Notes:
1. Does not enforce args and keywords as valid python.
2. String delimiters can be escaped (\) within strings.
3. Key-value delimiters (=) can be surrounded by blanks.
4. Non-string integer kwarg values will be int; all other
values are str.
5. Designed for functionality, not speed
|
[
"parse",
"a",
"string",
"into",
"args",
"and",
"kwargs"
] |
df8e9a4b18c563022a503faa27e822c9a5755490
|
https://github.com/robertchase/ergaleia/blob/df8e9a4b18c563022a503faa27e822c9a5755490/ergaleia/to_args.py#L8-L70
|
241,668
|
klmitch/appathy
|
appathy/application.py
|
Application.dispatch
|
def dispatch(self, req):
"""
Called by the Routes middleware to dispatch the request to the
appropriate controller. If a webob exception is raised, it is
returned; if some other exception is raised, the webob
`HTTPInternalServerError` exception is raised. Otherwise, the
return value of the controller is returned.
"""
# Grab the request parameters
params = req.environ['wsgiorg.routing_args'][1]
# What controller is authoritative?
controller = params.pop('controller')
# Determine its name
cont_class = controller.__class__
cont_name = "%s:%s" % (cont_class.__module__, cont_class.__name__)
# Determine the origin of the request
origin = req.remote_addr if req.remote_addr else '[local]'
if req.remote_user:
origin = '%s (%s)' % (origin, req.remote_user)
# Log that we're processing the request
LOG.info("%s %s %s (controller %r)" %
(origin, req.method, req.url, cont_name))
# Call into that controller
try:
return controller(req, params)
except webob.exc.HTTPException as e:
# Return the HTTP exception directly
return e
except exceptions.AppathyResponse as e:
# Return the webob.Response directly
return e.response
except Exception as e:
# Log the controller exception
LOG.exception("Exception occurred in controller %r" % cont_name)
# These exceptions result in a 500. Note we're
# intentionally not including the exception message, since
# it could contain sensitive data.
return webob.exc.HTTPInternalServerError()
|
python
|
def dispatch(self, req):
"""
Called by the Routes middleware to dispatch the request to the
appropriate controller. If a webob exception is raised, it is
returned; if some other exception is raised, the webob
`HTTPInternalServerError` exception is raised. Otherwise, the
return value of the controller is returned.
"""
# Grab the request parameters
params = req.environ['wsgiorg.routing_args'][1]
# What controller is authoritative?
controller = params.pop('controller')
# Determine its name
cont_class = controller.__class__
cont_name = "%s:%s" % (cont_class.__module__, cont_class.__name__)
# Determine the origin of the request
origin = req.remote_addr if req.remote_addr else '[local]'
if req.remote_user:
origin = '%s (%s)' % (origin, req.remote_user)
# Log that we're processing the request
LOG.info("%s %s %s (controller %r)" %
(origin, req.method, req.url, cont_name))
# Call into that controller
try:
return controller(req, params)
except webob.exc.HTTPException as e:
# Return the HTTP exception directly
return e
except exceptions.AppathyResponse as e:
# Return the webob.Response directly
return e.response
except Exception as e:
# Log the controller exception
LOG.exception("Exception occurred in controller %r" % cont_name)
# These exceptions result in a 500. Note we're
# intentionally not including the exception message, since
# it could contain sensitive data.
return webob.exc.HTTPInternalServerError()
|
[
"def",
"dispatch",
"(",
"self",
",",
"req",
")",
":",
"# Grab the request parameters",
"params",
"=",
"req",
".",
"environ",
"[",
"'wsgiorg.routing_args'",
"]",
"[",
"1",
"]",
"# What controller is authoritative?",
"controller",
"=",
"params",
".",
"pop",
"(",
"'controller'",
")",
"# Determine its name",
"cont_class",
"=",
"controller",
".",
"__class__",
"cont_name",
"=",
"\"%s:%s\"",
"%",
"(",
"cont_class",
".",
"__module__",
",",
"cont_class",
".",
"__name__",
")",
"# Determine the origin of the request",
"origin",
"=",
"req",
".",
"remote_addr",
"if",
"req",
".",
"remote_addr",
"else",
"'[local]'",
"if",
"req",
".",
"remote_user",
":",
"origin",
"=",
"'%s (%s)'",
"%",
"(",
"origin",
",",
"req",
".",
"remote_user",
")",
"# Log that we're processing the request",
"LOG",
".",
"info",
"(",
"\"%s %s %s (controller %r)\"",
"%",
"(",
"origin",
",",
"req",
".",
"method",
",",
"req",
".",
"url",
",",
"cont_name",
")",
")",
"# Call into that controller",
"try",
":",
"return",
"controller",
"(",
"req",
",",
"params",
")",
"except",
"webob",
".",
"exc",
".",
"HTTPException",
"as",
"e",
":",
"# Return the HTTP exception directly",
"return",
"e",
"except",
"exceptions",
".",
"AppathyResponse",
"as",
"e",
":",
"# Return the webob.Response directly",
"return",
"e",
".",
"response",
"except",
"Exception",
"as",
"e",
":",
"# Log the controller exception",
"LOG",
".",
"exception",
"(",
"\"Exception occurred in controller %r\"",
"%",
"cont_name",
")",
"# These exceptions result in a 500. Note we're",
"# intentionally not including the exception message, since",
"# it could contain sensitive data.",
"return",
"webob",
".",
"exc",
".",
"HTTPInternalServerError",
"(",
")"
] |
Called by the Routes middleware to dispatch the request to the
appropriate controller. If a webob exception is raised, it is
returned; if some other exception is raised, the webob
`HTTPInternalServerError` exception is raised. Otherwise, the
return value of the controller is returned.
|
[
"Called",
"by",
"the",
"Routes",
"middleware",
"to",
"dispatch",
"the",
"request",
"to",
"the",
"appropriate",
"controller",
".",
"If",
"a",
"webob",
"exception",
"is",
"raised",
"it",
"is",
"returned",
";",
"if",
"some",
"other",
"exception",
"is",
"raised",
"the",
"webob",
"HTTPInternalServerError",
"exception",
"is",
"raised",
".",
"Otherwise",
"the",
"return",
"value",
"of",
"the",
"controller",
"is",
"returned",
"."
] |
a10aa7d21d38622e984a8fe106ab37114af90dc2
|
https://github.com/klmitch/appathy/blob/a10aa7d21d38622e984a8fe106ab37114af90dc2/appathy/application.py#L110-L154
|
241,669
|
shaded-enmity/docker-hica
|
base/hica_base.py
|
HicaLabelStore.query
|
def query(self, ns, selector='*'):
""" Query the label store for labels
:param ns: Label namespace (`bind_pwd` for example)
:type ns: str
:param selector: Target selector (`test` or `test.guest` for example)
:type selector: str
"""
q, r = HicaLabelStore.PREFIX + '.' + ns, []
for (key, value) in self.items:
if not selector and key == q:
r.append((key, value))
if key.startswith(q) and key != q:
sub = key[len(q):]
m = re.match('.' + selector, sub)
if m:
r.append((key, value))
return r
|
python
|
def query(self, ns, selector='*'):
""" Query the label store for labels
:param ns: Label namespace (`bind_pwd` for example)
:type ns: str
:param selector: Target selector (`test` or `test.guest` for example)
:type selector: str
"""
q, r = HicaLabelStore.PREFIX + '.' + ns, []
for (key, value) in self.items:
if not selector and key == q:
r.append((key, value))
if key.startswith(q) and key != q:
sub = key[len(q):]
m = re.match('.' + selector, sub)
if m:
r.append((key, value))
return r
|
[
"def",
"query",
"(",
"self",
",",
"ns",
",",
"selector",
"=",
"'*'",
")",
":",
"q",
",",
"r",
"=",
"HicaLabelStore",
".",
"PREFIX",
"+",
"'.'",
"+",
"ns",
",",
"[",
"]",
"for",
"(",
"key",
",",
"value",
")",
"in",
"self",
".",
"items",
":",
"if",
"not",
"selector",
"and",
"key",
"==",
"q",
":",
"r",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"if",
"key",
".",
"startswith",
"(",
"q",
")",
"and",
"key",
"!=",
"q",
":",
"sub",
"=",
"key",
"[",
"len",
"(",
"q",
")",
":",
"]",
"m",
"=",
"re",
".",
"match",
"(",
"'.'",
"+",
"selector",
",",
"sub",
")",
"if",
"m",
":",
"r",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"return",
"r"
] |
Query the label store for labels
:param ns: Label namespace (`bind_pwd` for example)
:type ns: str
:param selector: Target selector (`test` or `test.guest` for example)
:type selector: str
|
[
"Query",
"the",
"label",
"store",
"for",
"labels"
] |
bc425586297e1eb228b70ee6fca8c499849ec87d
|
https://github.com/shaded-enmity/docker-hica/blob/bc425586297e1eb228b70ee6fca8c499849ec87d/base/hica_base.py#L63-L81
|
241,670
|
shaded-enmity/docker-hica
|
base/hica_base.py
|
HicaLabelStore.get_value
|
def get_value(self, label):
""" Get value from a single fully-qualified name """
for (key, value) in self.items:
if key == label:
return value
|
python
|
def get_value(self, label):
""" Get value from a single fully-qualified name """
for (key, value) in self.items:
if key == label:
return value
|
[
"def",
"get_value",
"(",
"self",
",",
"label",
")",
":",
"for",
"(",
"key",
",",
"value",
")",
"in",
"self",
".",
"items",
":",
"if",
"key",
"==",
"label",
":",
"return",
"value"
] |
Get value from a single fully-qualified name
|
[
"Get",
"value",
"from",
"a",
"single",
"fully",
"-",
"qualified",
"name"
] |
bc425586297e1eb228b70ee6fca8c499849ec87d
|
https://github.com/shaded-enmity/docker-hica/blob/bc425586297e1eb228b70ee6fca8c499849ec87d/base/hica_base.py#L87-L91
|
241,671
|
vilmibm/done
|
parsedatetime/parsedatetime_consts.py
|
_initPatterns
|
def _initPatterns(ptc):
"""
Helper function to take the different localized bits from ptc and
create the regex strings.
"""
# TODO add code to parse the date formats and build the regexes up from sub-parts
# TODO find all hard-coded uses of date/time seperators
ptc.RE_DATE4 = r'''(?P<date>(((?P<day>\d\d?)(?P<suffix>%(daysuffix)s)?(,)?(\s)?)
(?P<mthname>(%(months)s|%(shortmonths)s))\s?
(?P<year>\d\d(\d\d)?)?
)
)''' % ptc.re_values
# I refactored DATE3 to fix Issue 16 http://code.google.com/p/parsedatetime/issues/detail?id=16
# I suspect the final line was for a trailing time - but testing shows it's not needed
# ptc.RE_DATE3 = r'''(?P<date>((?P<mthname>(%(months)s|%(shortmonths)s))\s?
# ((?P<day>\d\d?)(\s?|%(daysuffix)s|$)+)?
# (,\s?(?P<year>\d\d(\d\d)?))?))
# (\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_DATE3 = r'''(?P<date>(
(((?P<mthname>(%(months)s|%(shortmonths)s))|
((?P<day>\d\d?)(?P<suffix>%(daysuffix)s)?))(\s)?){1,2}
((,)?(\s)?(?P<year>\d\d(\d\d)?))?
)
)''' % ptc.re_values
ptc.RE_MONTH = r'''(\s?|^)
(?P<month>(
(?P<mthname>(%(months)s|%(shortmonths)s))
(\s?(?P<year>(\d\d\d\d)))?
))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_WEEKDAY = r'''(\s?|^)
(?P<weekday>(%(days)s|%(shortdays)s))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_SPECIAL = r'(?P<special>^[%(specials)s]+)\s+' % ptc.re_values
ptc.RE_UNITS = r'''(?P<qty>(-?\d+\s*
(?P<units>((%(units)s)s?))
))''' % ptc.re_values
ptc.RE_QUNITS = r'''(?P<qty>(-?\d+\s?
(?P<qunits>%(qunits)s)
(\s?|,|$)
))''' % ptc.re_values
ptc.RE_MODIFIER = r'''(\s?|^)
(?P<modifier>
(previous|prev|last|next|eod|eo|(end\sof)|(in\sa)))''' % ptc.re_values
ptc.RE_MODIFIER2 = r'''(\s?|^)
(?P<modifier>
(from|before|after|ago|prior))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_TIMEHMS = r'''(\s?|^)
(?P<hours>\d\d?)
(?P<tsep>%(timeseperator)s|)
(?P<minutes>\d\d)
(?:(?P=tsep)(?P<seconds>\d\d(?:[.,]\d+)?))?''' % ptc.re_values
ptc.RE_TIMEHMS2 = r'''(?P<hours>(\d\d?))
((?P<tsep>%(timeseperator)s|)
(?P<minutes>(\d\d?))
(?:(?P=tsep)
(?P<seconds>\d\d?
(?:[.,]\d+)?))?)?''' % ptc.re_values
if 'meridian' in ptc.re_values:
ptc.RE_TIMEHMS2 += r'\s?(?P<meridian>(%(meridian)s))' % ptc.re_values
dateSeps = ''.join(ptc.dateSep) + '.'
ptc.RE_DATE = r'''(\s?|^)
(?P<date>(\d\d?[%s]\d\d?([%s]\d\d(\d\d)?)?))
(\s?|$|[^0-9a-zA-Z])''' % (dateSeps, dateSeps)
ptc.RE_DATE2 = r'[%s]' % dateSeps
ptc.RE_DAY = r'''(\s?|^)
(?P<day>(today|tomorrow|yesterday))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_DAY2 = r'''(?P<day>\d\d?)|(?P<suffix>%(daysuffix)s)
''' % ptc.re_values
ptc.RE_TIME = r'''(\s?|^)
(?P<time>(morning|breakfast|noon|lunch|evening|midnight|tonight|dinner|night|now))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_REMAINING = r'\s+'
# Regex for date/time ranges
ptc.RE_RTIMEHMS = r'''(\s?|^)
(\d\d?)%(timeseperator)s
(\d\d)
(%(timeseperator)s(\d\d))?
(\s?|$)''' % ptc.re_values
ptc.RE_RTIMEHMS2 = r'''(\s?|^)
(\d\d?)
(%(timeseperator)s(\d\d?))?
(%(timeseperator)s(\d\d?))?''' % ptc.re_values
if 'meridian' in ptc.re_values:
ptc.RE_RTIMEHMS2 += r'\s?(%(meridian)s)' % ptc.re_values
ptc.RE_RDATE = r'(\d+([%s]\d+)+)' % dateSeps
ptc.RE_RDATE3 = r'''((((%(months)s))\s?
((\d\d?)
(\s?|%(daysuffix)s|$)+)?
(,\s?\d\d\d\d)?))''' % ptc.re_values
# "06/07/06 - 08/09/06"
ptc.DATERNG1 = ptc.RE_RDATE + r'\s?%(rangeseperator)s\s?' + ptc.RE_RDATE
ptc.DATERNG1 = ptc.DATERNG1 % ptc.re_values
# "march 31 - june 1st, 2006"
ptc.DATERNG2 = ptc.RE_RDATE3 + r'\s?%(rangeseperator)s\s?' + ptc.RE_RDATE3
ptc.DATERNG2 = ptc.DATERNG2 % ptc.re_values
# "march 1rd -13th"
ptc.DATERNG3 = ptc.RE_RDATE3 + r'\s?%(rangeseperator)s\s?(\d\d?)\s?(rd|st|nd|th)?'
ptc.DATERNG3 = ptc.DATERNG3 % ptc.re_values
# "4:00:55 pm - 5:90:44 am", '4p-5p'
ptc.TIMERNG1 = ptc.RE_RTIMEHMS2 + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG1 = ptc.TIMERNG1 % ptc.re_values
# "4:00 - 5:90 ", "4:55:55-3:44:55"
ptc.TIMERNG2 = ptc.RE_RTIMEHMS + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS
ptc.TIMERNG2 = ptc.TIMERNG2 % ptc.re_values
# "4-5pm "
ptc.TIMERNG3 = r'\d\d?\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG3 = ptc.TIMERNG3 % ptc.re_values
# "4:30-5pm "
ptc.TIMERNG4 = ptc.RE_RTIMEHMS + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG4 = ptc.TIMERNG4 % ptc.re_values
|
python
|
def _initPatterns(ptc):
"""
Helper function to take the different localized bits from ptc and
create the regex strings.
"""
# TODO add code to parse the date formats and build the regexes up from sub-parts
# TODO find all hard-coded uses of date/time seperators
ptc.RE_DATE4 = r'''(?P<date>(((?P<day>\d\d?)(?P<suffix>%(daysuffix)s)?(,)?(\s)?)
(?P<mthname>(%(months)s|%(shortmonths)s))\s?
(?P<year>\d\d(\d\d)?)?
)
)''' % ptc.re_values
# I refactored DATE3 to fix Issue 16 http://code.google.com/p/parsedatetime/issues/detail?id=16
# I suspect the final line was for a trailing time - but testing shows it's not needed
# ptc.RE_DATE3 = r'''(?P<date>((?P<mthname>(%(months)s|%(shortmonths)s))\s?
# ((?P<day>\d\d?)(\s?|%(daysuffix)s|$)+)?
# (,\s?(?P<year>\d\d(\d\d)?))?))
# (\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_DATE3 = r'''(?P<date>(
(((?P<mthname>(%(months)s|%(shortmonths)s))|
((?P<day>\d\d?)(?P<suffix>%(daysuffix)s)?))(\s)?){1,2}
((,)?(\s)?(?P<year>\d\d(\d\d)?))?
)
)''' % ptc.re_values
ptc.RE_MONTH = r'''(\s?|^)
(?P<month>(
(?P<mthname>(%(months)s|%(shortmonths)s))
(\s?(?P<year>(\d\d\d\d)))?
))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_WEEKDAY = r'''(\s?|^)
(?P<weekday>(%(days)s|%(shortdays)s))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_SPECIAL = r'(?P<special>^[%(specials)s]+)\s+' % ptc.re_values
ptc.RE_UNITS = r'''(?P<qty>(-?\d+\s*
(?P<units>((%(units)s)s?))
))''' % ptc.re_values
ptc.RE_QUNITS = r'''(?P<qty>(-?\d+\s?
(?P<qunits>%(qunits)s)
(\s?|,|$)
))''' % ptc.re_values
ptc.RE_MODIFIER = r'''(\s?|^)
(?P<modifier>
(previous|prev|last|next|eod|eo|(end\sof)|(in\sa)))''' % ptc.re_values
ptc.RE_MODIFIER2 = r'''(\s?|^)
(?P<modifier>
(from|before|after|ago|prior))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_TIMEHMS = r'''(\s?|^)
(?P<hours>\d\d?)
(?P<tsep>%(timeseperator)s|)
(?P<minutes>\d\d)
(?:(?P=tsep)(?P<seconds>\d\d(?:[.,]\d+)?))?''' % ptc.re_values
ptc.RE_TIMEHMS2 = r'''(?P<hours>(\d\d?))
((?P<tsep>%(timeseperator)s|)
(?P<minutes>(\d\d?))
(?:(?P=tsep)
(?P<seconds>\d\d?
(?:[.,]\d+)?))?)?''' % ptc.re_values
if 'meridian' in ptc.re_values:
ptc.RE_TIMEHMS2 += r'\s?(?P<meridian>(%(meridian)s))' % ptc.re_values
dateSeps = ''.join(ptc.dateSep) + '.'
ptc.RE_DATE = r'''(\s?|^)
(?P<date>(\d\d?[%s]\d\d?([%s]\d\d(\d\d)?)?))
(\s?|$|[^0-9a-zA-Z])''' % (dateSeps, dateSeps)
ptc.RE_DATE2 = r'[%s]' % dateSeps
ptc.RE_DAY = r'''(\s?|^)
(?P<day>(today|tomorrow|yesterday))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_DAY2 = r'''(?P<day>\d\d?)|(?P<suffix>%(daysuffix)s)
''' % ptc.re_values
ptc.RE_TIME = r'''(\s?|^)
(?P<time>(morning|breakfast|noon|lunch|evening|midnight|tonight|dinner|night|now))
(\s?|$|[^0-9a-zA-Z])''' % ptc.re_values
ptc.RE_REMAINING = r'\s+'
# Regex for date/time ranges
ptc.RE_RTIMEHMS = r'''(\s?|^)
(\d\d?)%(timeseperator)s
(\d\d)
(%(timeseperator)s(\d\d))?
(\s?|$)''' % ptc.re_values
ptc.RE_RTIMEHMS2 = r'''(\s?|^)
(\d\d?)
(%(timeseperator)s(\d\d?))?
(%(timeseperator)s(\d\d?))?''' % ptc.re_values
if 'meridian' in ptc.re_values:
ptc.RE_RTIMEHMS2 += r'\s?(%(meridian)s)' % ptc.re_values
ptc.RE_RDATE = r'(\d+([%s]\d+)+)' % dateSeps
ptc.RE_RDATE3 = r'''((((%(months)s))\s?
((\d\d?)
(\s?|%(daysuffix)s|$)+)?
(,\s?\d\d\d\d)?))''' % ptc.re_values
# "06/07/06 - 08/09/06"
ptc.DATERNG1 = ptc.RE_RDATE + r'\s?%(rangeseperator)s\s?' + ptc.RE_RDATE
ptc.DATERNG1 = ptc.DATERNG1 % ptc.re_values
# "march 31 - june 1st, 2006"
ptc.DATERNG2 = ptc.RE_RDATE3 + r'\s?%(rangeseperator)s\s?' + ptc.RE_RDATE3
ptc.DATERNG2 = ptc.DATERNG2 % ptc.re_values
# "march 1rd -13th"
ptc.DATERNG3 = ptc.RE_RDATE3 + r'\s?%(rangeseperator)s\s?(\d\d?)\s?(rd|st|nd|th)?'
ptc.DATERNG3 = ptc.DATERNG3 % ptc.re_values
# "4:00:55 pm - 5:90:44 am", '4p-5p'
ptc.TIMERNG1 = ptc.RE_RTIMEHMS2 + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG1 = ptc.TIMERNG1 % ptc.re_values
# "4:00 - 5:90 ", "4:55:55-3:44:55"
ptc.TIMERNG2 = ptc.RE_RTIMEHMS + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS
ptc.TIMERNG2 = ptc.TIMERNG2 % ptc.re_values
# "4-5pm "
ptc.TIMERNG3 = r'\d\d?\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG3 = ptc.TIMERNG3 % ptc.re_values
# "4:30-5pm "
ptc.TIMERNG4 = ptc.RE_RTIMEHMS + r'\s?%(rangeseperator)s\s?' + ptc.RE_RTIMEHMS2
ptc.TIMERNG4 = ptc.TIMERNG4 % ptc.re_values
|
[
"def",
"_initPatterns",
"(",
"ptc",
")",
":",
"# TODO add code to parse the date formats and build the regexes up from sub-parts",
"# TODO find all hard-coded uses of date/time seperators",
"ptc",
".",
"RE_DATE4",
"=",
"r'''(?P<date>(((?P<day>\\d\\d?)(?P<suffix>%(daysuffix)s)?(,)?(\\s)?)\n (?P<mthname>(%(months)s|%(shortmonths)s))\\s?\n (?P<year>\\d\\d(\\d\\d)?)?\n )\n )'''",
"%",
"ptc",
".",
"re_values",
"# I refactored DATE3 to fix Issue 16 http://code.google.com/p/parsedatetime/issues/detail?id=16",
"# I suspect the final line was for a trailing time - but testing shows it's not needed",
"# ptc.RE_DATE3 = r'''(?P<date>((?P<mthname>(%(months)s|%(shortmonths)s))\\s?",
"# ((?P<day>\\d\\d?)(\\s?|%(daysuffix)s|$)+)?",
"# (,\\s?(?P<year>\\d\\d(\\d\\d)?))?))",
"# (\\s?|$|[^0-9a-zA-Z])''' % ptc.re_values",
"ptc",
".",
"RE_DATE3",
"=",
"r'''(?P<date>(\n (((?P<mthname>(%(months)s|%(shortmonths)s))|\n ((?P<day>\\d\\d?)(?P<suffix>%(daysuffix)s)?))(\\s)?){1,2}\n ((,)?(\\s)?(?P<year>\\d\\d(\\d\\d)?))?\n )\n )'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_MONTH",
"=",
"r'''(\\s?|^)\n (?P<month>(\n (?P<mthname>(%(months)s|%(shortmonths)s))\n (\\s?(?P<year>(\\d\\d\\d\\d)))?\n ))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_WEEKDAY",
"=",
"r'''(\\s?|^)\n (?P<weekday>(%(days)s|%(shortdays)s))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_SPECIAL",
"=",
"r'(?P<special>^[%(specials)s]+)\\s+'",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_UNITS",
"=",
"r'''(?P<qty>(-?\\d+\\s*\n (?P<units>((%(units)s)s?))\n ))'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_QUNITS",
"=",
"r'''(?P<qty>(-?\\d+\\s?\n (?P<qunits>%(qunits)s)\n (\\s?|,|$)\n ))'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_MODIFIER",
"=",
"r'''(\\s?|^)\n (?P<modifier>\n (previous|prev|last|next|eod|eo|(end\\sof)|(in\\sa)))'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_MODIFIER2",
"=",
"r'''(\\s?|^)\n (?P<modifier>\n (from|before|after|ago|prior))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_TIMEHMS",
"=",
"r'''(\\s?|^)\n (?P<hours>\\d\\d?)\n (?P<tsep>%(timeseperator)s|)\n (?P<minutes>\\d\\d)\n (?:(?P=tsep)(?P<seconds>\\d\\d(?:[.,]\\d+)?))?'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_TIMEHMS2",
"=",
"r'''(?P<hours>(\\d\\d?))\n ((?P<tsep>%(timeseperator)s|)\n (?P<minutes>(\\d\\d?))\n (?:(?P=tsep)\n (?P<seconds>\\d\\d?\n (?:[.,]\\d+)?))?)?'''",
"%",
"ptc",
".",
"re_values",
"if",
"'meridian'",
"in",
"ptc",
".",
"re_values",
":",
"ptc",
".",
"RE_TIMEHMS2",
"+=",
"r'\\s?(?P<meridian>(%(meridian)s))'",
"%",
"ptc",
".",
"re_values",
"dateSeps",
"=",
"''",
".",
"join",
"(",
"ptc",
".",
"dateSep",
")",
"+",
"'.'",
"ptc",
".",
"RE_DATE",
"=",
"r'''(\\s?|^)\n (?P<date>(\\d\\d?[%s]\\d\\d?([%s]\\d\\d(\\d\\d)?)?))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"(",
"dateSeps",
",",
"dateSeps",
")",
"ptc",
".",
"RE_DATE2",
"=",
"r'[%s]'",
"%",
"dateSeps",
"ptc",
".",
"RE_DAY",
"=",
"r'''(\\s?|^)\n (?P<day>(today|tomorrow|yesterday))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_DAY2",
"=",
"r'''(?P<day>\\d\\d?)|(?P<suffix>%(daysuffix)s)\n '''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_TIME",
"=",
"r'''(\\s?|^)\n (?P<time>(morning|breakfast|noon|lunch|evening|midnight|tonight|dinner|night|now))\n (\\s?|$|[^0-9a-zA-Z])'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_REMAINING",
"=",
"r'\\s+'",
"# Regex for date/time ranges",
"ptc",
".",
"RE_RTIMEHMS",
"=",
"r'''(\\s?|^)\n (\\d\\d?)%(timeseperator)s\n (\\d\\d)\n (%(timeseperator)s(\\d\\d))?\n (\\s?|$)'''",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_RTIMEHMS2",
"=",
"r'''(\\s?|^)\n (\\d\\d?)\n (%(timeseperator)s(\\d\\d?))?\n (%(timeseperator)s(\\d\\d?))?'''",
"%",
"ptc",
".",
"re_values",
"if",
"'meridian'",
"in",
"ptc",
".",
"re_values",
":",
"ptc",
".",
"RE_RTIMEHMS2",
"+=",
"r'\\s?(%(meridian)s)'",
"%",
"ptc",
".",
"re_values",
"ptc",
".",
"RE_RDATE",
"=",
"r'(\\d+([%s]\\d+)+)'",
"%",
"dateSeps",
"ptc",
".",
"RE_RDATE3",
"=",
"r'''((((%(months)s))\\s?\n ((\\d\\d?)\n (\\s?|%(daysuffix)s|$)+)?\n (,\\s?\\d\\d\\d\\d)?))'''",
"%",
"ptc",
".",
"re_values",
"# \"06/07/06 - 08/09/06\"",
"ptc",
".",
"DATERNG1",
"=",
"ptc",
".",
"RE_RDATE",
"+",
"r'\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RDATE",
"ptc",
".",
"DATERNG1",
"=",
"ptc",
".",
"DATERNG1",
"%",
"ptc",
".",
"re_values",
"# \"march 31 - june 1st, 2006\"",
"ptc",
".",
"DATERNG2",
"=",
"ptc",
".",
"RE_RDATE3",
"+",
"r'\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RDATE3",
"ptc",
".",
"DATERNG2",
"=",
"ptc",
".",
"DATERNG2",
"%",
"ptc",
".",
"re_values",
"# \"march 1rd -13th\"",
"ptc",
".",
"DATERNG3",
"=",
"ptc",
".",
"RE_RDATE3",
"+",
"r'\\s?%(rangeseperator)s\\s?(\\d\\d?)\\s?(rd|st|nd|th)?'",
"ptc",
".",
"DATERNG3",
"=",
"ptc",
".",
"DATERNG3",
"%",
"ptc",
".",
"re_values",
"# \"4:00:55 pm - 5:90:44 am\", '4p-5p'",
"ptc",
".",
"TIMERNG1",
"=",
"ptc",
".",
"RE_RTIMEHMS2",
"+",
"r'\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RTIMEHMS2",
"ptc",
".",
"TIMERNG1",
"=",
"ptc",
".",
"TIMERNG1",
"%",
"ptc",
".",
"re_values",
"# \"4:00 - 5:90 \", \"4:55:55-3:44:55\"",
"ptc",
".",
"TIMERNG2",
"=",
"ptc",
".",
"RE_RTIMEHMS",
"+",
"r'\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RTIMEHMS",
"ptc",
".",
"TIMERNG2",
"=",
"ptc",
".",
"TIMERNG2",
"%",
"ptc",
".",
"re_values",
"# \"4-5pm \"",
"ptc",
".",
"TIMERNG3",
"=",
"r'\\d\\d?\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RTIMEHMS2",
"ptc",
".",
"TIMERNG3",
"=",
"ptc",
".",
"TIMERNG3",
"%",
"ptc",
".",
"re_values",
"# \"4:30-5pm \"",
"ptc",
".",
"TIMERNG4",
"=",
"ptc",
".",
"RE_RTIMEHMS",
"+",
"r'\\s?%(rangeseperator)s\\s?'",
"+",
"ptc",
".",
"RE_RTIMEHMS2",
"ptc",
".",
"TIMERNG4",
"=",
"ptc",
".",
"TIMERNG4",
"%",
"ptc",
".",
"re_values"
] |
Helper function to take the different localized bits from ptc and
create the regex strings.
|
[
"Helper",
"function",
"to",
"take",
"the",
"different",
"localized",
"bits",
"from",
"ptc",
"and",
"create",
"the",
"regex",
"strings",
"."
] |
7e5b60d2900ceddefa49de352a19b794199b51a8
|
https://github.com/vilmibm/done/blob/7e5b60d2900ceddefa49de352a19b794199b51a8/parsedatetime/parsedatetime_consts.py#L710-L838
|
241,672
|
vilmibm/done
|
parsedatetime/parsedatetime_consts.py
|
_initConstants
|
def _initConstants(ptc):
"""
Create localized versions of the units, week and month names
"""
# build weekday offsets - yes, it assumes the Weekday and shortWeekday
# lists are in the same order and Mon..Sun (Python style)
ptc.WeekdayOffsets = {}
o = 0
for key in ptc.Weekdays:
ptc.WeekdayOffsets[key] = o
o += 1
o = 0
for key in ptc.shortWeekdays:
ptc.WeekdayOffsets[key] = o
o += 1
# build month offsets - yes, it assumes the Months and shortMonths
# lists are in the same order and Jan..Dec
ptc.MonthOffsets = {}
o = 1
for key in ptc.Months:
ptc.MonthOffsets[key] = o
o += 1
o = 1
for key in ptc.shortMonths:
ptc.MonthOffsets[key] = o
o += 1
|
python
|
def _initConstants(ptc):
"""
Create localized versions of the units, week and month names
"""
# build weekday offsets - yes, it assumes the Weekday and shortWeekday
# lists are in the same order and Mon..Sun (Python style)
ptc.WeekdayOffsets = {}
o = 0
for key in ptc.Weekdays:
ptc.WeekdayOffsets[key] = o
o += 1
o = 0
for key in ptc.shortWeekdays:
ptc.WeekdayOffsets[key] = o
o += 1
# build month offsets - yes, it assumes the Months and shortMonths
# lists are in the same order and Jan..Dec
ptc.MonthOffsets = {}
o = 1
for key in ptc.Months:
ptc.MonthOffsets[key] = o
o += 1
o = 1
for key in ptc.shortMonths:
ptc.MonthOffsets[key] = o
o += 1
|
[
"def",
"_initConstants",
"(",
"ptc",
")",
":",
"# build weekday offsets - yes, it assumes the Weekday and shortWeekday",
"# lists are in the same order and Mon..Sun (Python style)",
"ptc",
".",
"WeekdayOffsets",
"=",
"{",
"}",
"o",
"=",
"0",
"for",
"key",
"in",
"ptc",
".",
"Weekdays",
":",
"ptc",
".",
"WeekdayOffsets",
"[",
"key",
"]",
"=",
"o",
"o",
"+=",
"1",
"o",
"=",
"0",
"for",
"key",
"in",
"ptc",
".",
"shortWeekdays",
":",
"ptc",
".",
"WeekdayOffsets",
"[",
"key",
"]",
"=",
"o",
"o",
"+=",
"1",
"# build month offsets - yes, it assumes the Months and shortMonths",
"# lists are in the same order and Jan..Dec",
"ptc",
".",
"MonthOffsets",
"=",
"{",
"}",
"o",
"=",
"1",
"for",
"key",
"in",
"ptc",
".",
"Months",
":",
"ptc",
".",
"MonthOffsets",
"[",
"key",
"]",
"=",
"o",
"o",
"+=",
"1",
"o",
"=",
"1",
"for",
"key",
"in",
"ptc",
".",
"shortMonths",
":",
"ptc",
".",
"MonthOffsets",
"[",
"key",
"]",
"=",
"o",
"o",
"+=",
"1"
] |
Create localized versions of the units, week and month names
|
[
"Create",
"localized",
"versions",
"of",
"the",
"units",
"week",
"and",
"month",
"names"
] |
7e5b60d2900ceddefa49de352a19b794199b51a8
|
https://github.com/vilmibm/done/blob/7e5b60d2900ceddefa49de352a19b794199b51a8/parsedatetime/parsedatetime_consts.py#L841-L869
|
241,673
|
krinj/k-util
|
k_util/serializable.py
|
Serializable.write_to_file
|
def write_to_file(self, file_path: str) -> None:
""" Serialize and write the data into a JSON file. """
data = self.encode()
with open(file_path, "w") as f:
json.dump(data, f, indent=1)
|
python
|
def write_to_file(self, file_path: str) -> None:
""" Serialize and write the data into a JSON file. """
data = self.encode()
with open(file_path, "w") as f:
json.dump(data, f, indent=1)
|
[
"def",
"write_to_file",
"(",
"self",
",",
"file_path",
":",
"str",
")",
"->",
"None",
":",
"data",
"=",
"self",
".",
"encode",
"(",
")",
"with",
"open",
"(",
"file_path",
",",
"\"w\"",
")",
"as",
"f",
":",
"json",
".",
"dump",
"(",
"data",
",",
"f",
",",
"indent",
"=",
"1",
")"
] |
Serialize and write the data into a JSON file.
|
[
"Serialize",
"and",
"write",
"the",
"data",
"into",
"a",
"JSON",
"file",
"."
] |
b118826b1d6f49ca4e1ca7327d5b171db332ac23
|
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/k_util/serializable.py#L54-L58
|
241,674
|
krinj/k-util
|
k_util/serializable.py
|
Serializable.load_from_file
|
def load_from_file(cls, file_path: str):
""" Read and reconstruct the data from a JSON file. """
with open(file_path, "r") as f:
data = json.load(f)
item = cls.decode(data=data)
return item
|
python
|
def load_from_file(cls, file_path: str):
""" Read and reconstruct the data from a JSON file. """
with open(file_path, "r") as f:
data = json.load(f)
item = cls.decode(data=data)
return item
|
[
"def",
"load_from_file",
"(",
"cls",
",",
"file_path",
":",
"str",
")",
":",
"with",
"open",
"(",
"file_path",
",",
"\"r\"",
")",
"as",
"f",
":",
"data",
"=",
"json",
".",
"load",
"(",
"f",
")",
"item",
"=",
"cls",
".",
"decode",
"(",
"data",
"=",
"data",
")",
"return",
"item"
] |
Read and reconstruct the data from a JSON file.
|
[
"Read",
"and",
"reconstruct",
"the",
"data",
"from",
"a",
"JSON",
"file",
"."
] |
b118826b1d6f49ca4e1ca7327d5b171db332ac23
|
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/k_util/serializable.py#L61-L66
|
241,675
|
fr33jc/bang
|
bang/providers/bases.py
|
Provider.gen_component_name
|
def gen_component_name(self, basename, postfix_length=13):
"""
Creates a resource identifier with a random postfix. This is an
attempt to minimize name collisions in provider namespaces.
:param str basename: The string that will be prefixed with the stack
name, and postfixed with some random string.
:param int postfix_length: The length of the postfix to be appended.
"""
def newcname():
postfix = ''.join(
random.choice(_AWS_NAME_CHARS)
for i in xrange(postfix_length)
)
return '%s-%s' % (basename, postfix)
cname = newcname()
while cname in self.component_names:
cname = newcname()
self.component_names.append(cname)
return cname
|
python
|
def gen_component_name(self, basename, postfix_length=13):
"""
Creates a resource identifier with a random postfix. This is an
attempt to minimize name collisions in provider namespaces.
:param str basename: The string that will be prefixed with the stack
name, and postfixed with some random string.
:param int postfix_length: The length of the postfix to be appended.
"""
def newcname():
postfix = ''.join(
random.choice(_AWS_NAME_CHARS)
for i in xrange(postfix_length)
)
return '%s-%s' % (basename, postfix)
cname = newcname()
while cname in self.component_names:
cname = newcname()
self.component_names.append(cname)
return cname
|
[
"def",
"gen_component_name",
"(",
"self",
",",
"basename",
",",
"postfix_length",
"=",
"13",
")",
":",
"def",
"newcname",
"(",
")",
":",
"postfix",
"=",
"''",
".",
"join",
"(",
"random",
".",
"choice",
"(",
"_AWS_NAME_CHARS",
")",
"for",
"i",
"in",
"xrange",
"(",
"postfix_length",
")",
")",
"return",
"'%s-%s'",
"%",
"(",
"basename",
",",
"postfix",
")",
"cname",
"=",
"newcname",
"(",
")",
"while",
"cname",
"in",
"self",
".",
"component_names",
":",
"cname",
"=",
"newcname",
"(",
")",
"self",
".",
"component_names",
".",
"append",
"(",
"cname",
")",
"return",
"cname"
] |
Creates a resource identifier with a random postfix. This is an
attempt to minimize name collisions in provider namespaces.
:param str basename: The string that will be prefixed with the stack
name, and postfixed with some random string.
:param int postfix_length: The length of the postfix to be appended.
|
[
"Creates",
"a",
"resource",
"identifier",
"with",
"a",
"random",
"postfix",
".",
"This",
"is",
"an",
"attempt",
"to",
"minimize",
"name",
"collisions",
"in",
"provider",
"namespaces",
"."
] |
8f000713f88d2a9a8c1193b63ca10a6578560c16
|
https://github.com/fr33jc/bang/blob/8f000713f88d2a9a8c1193b63ca10a6578560c16/bang/providers/bases.py#L32-L53
|
241,676
|
political-memory/django-representatives
|
representatives/migrations/0020_rep_unique_slug_remove_remoteid.py
|
update_slugs
|
def update_slugs(apps, schema_editor):
"""
Include birthdate in slugs
"""
# Get model managers
Representative = apps.get_model("representatives", "Representative")
for rep in Representative.objects.all():
rep.slug = '%s-%s' % (rep.slug, rep.birth_date)
rep.save()
|
python
|
def update_slugs(apps, schema_editor):
"""
Include birthdate in slugs
"""
# Get model managers
Representative = apps.get_model("representatives", "Representative")
for rep in Representative.objects.all():
rep.slug = '%s-%s' % (rep.slug, rep.birth_date)
rep.save()
|
[
"def",
"update_slugs",
"(",
"apps",
",",
"schema_editor",
")",
":",
"# Get model managers",
"Representative",
"=",
"apps",
".",
"get_model",
"(",
"\"representatives\"",
",",
"\"Representative\"",
")",
"for",
"rep",
"in",
"Representative",
".",
"objects",
".",
"all",
"(",
")",
":",
"rep",
".",
"slug",
"=",
"'%s-%s'",
"%",
"(",
"rep",
".",
"slug",
",",
"rep",
".",
"birth_date",
")",
"rep",
".",
"save",
"(",
")"
] |
Include birthdate in slugs
|
[
"Include",
"birthdate",
"in",
"slugs"
] |
811c90d0250149e913e6196f0ab11c97d396be39
|
https://github.com/political-memory/django-representatives/blob/811c90d0250149e913e6196f0ab11c97d396be39/representatives/migrations/0020_rep_unique_slug_remove_remoteid.py#L9-L19
|
241,677
|
political-memory/django-representatives
|
representatives/migrations/0020_rep_unique_slug_remove_remoteid.py
|
create_parl_websites
|
def create_parl_websites(apps, schema_editor):
"""
Prepare for remote_id removal by creating WebSite entities from it.
"""
# Get model managers
Representative = apps.get_model("representatives", "Representative")
WebSite = apps.get_model("representatives", "WebSite")
today = datetime.date.today()
# EP
ep_url = 'http://www.europarl.europa.eu/meps/en/%s/_home.html'
qs = Representative.objects.filter(
models.Q(mandates__end_date__gte=today) |
models.Q(mandates__end_date__isnull=True),
mandates__group__chamber__abbreviation='EP'
)
for rep in qs:
changed = False
url = ep_url % rep.remote_id
try:
site = WebSite.objects.get(representative=rep, kind='EP')
except WebSite.DoesNotExist:
site = WebSite(representative=rep, kind='EP', url=url)
changed = True
if site.url != url:
site.url = url
changed = True
if changed:
site.save()
# AN/SEN
for chamber in ['AN', 'SEN']:
qs = Representative.objects.filter(
models.Q(mandates__end_date__gte=today) |
models.Q(mandates__end_date__isnull=True),
mandates__group__chamber__abbreviation=chamber
)
for rep in qs:
changed = False
url = rep.remote_id
try:
site = WebSite.objects.get(representative=rep, kind=chamber)
except WebSite.DoesNotExist:
site = WebSite(representative=rep, kind=chamber, url=url)
changed = True
if site.url != url:
site.url = url
changed = True
if changed:
site.save()
|
python
|
def create_parl_websites(apps, schema_editor):
"""
Prepare for remote_id removal by creating WebSite entities from it.
"""
# Get model managers
Representative = apps.get_model("representatives", "Representative")
WebSite = apps.get_model("representatives", "WebSite")
today = datetime.date.today()
# EP
ep_url = 'http://www.europarl.europa.eu/meps/en/%s/_home.html'
qs = Representative.objects.filter(
models.Q(mandates__end_date__gte=today) |
models.Q(mandates__end_date__isnull=True),
mandates__group__chamber__abbreviation='EP'
)
for rep in qs:
changed = False
url = ep_url % rep.remote_id
try:
site = WebSite.objects.get(representative=rep, kind='EP')
except WebSite.DoesNotExist:
site = WebSite(representative=rep, kind='EP', url=url)
changed = True
if site.url != url:
site.url = url
changed = True
if changed:
site.save()
# AN/SEN
for chamber in ['AN', 'SEN']:
qs = Representative.objects.filter(
models.Q(mandates__end_date__gte=today) |
models.Q(mandates__end_date__isnull=True),
mandates__group__chamber__abbreviation=chamber
)
for rep in qs:
changed = False
url = rep.remote_id
try:
site = WebSite.objects.get(representative=rep, kind=chamber)
except WebSite.DoesNotExist:
site = WebSite(representative=rep, kind=chamber, url=url)
changed = True
if site.url != url:
site.url = url
changed = True
if changed:
site.save()
|
[
"def",
"create_parl_websites",
"(",
"apps",
",",
"schema_editor",
")",
":",
"# Get model managers",
"Representative",
"=",
"apps",
".",
"get_model",
"(",
"\"representatives\"",
",",
"\"Representative\"",
")",
"WebSite",
"=",
"apps",
".",
"get_model",
"(",
"\"representatives\"",
",",
"\"WebSite\"",
")",
"today",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"# EP",
"ep_url",
"=",
"'http://www.europarl.europa.eu/meps/en/%s/_home.html'",
"qs",
"=",
"Representative",
".",
"objects",
".",
"filter",
"(",
"models",
".",
"Q",
"(",
"mandates__end_date__gte",
"=",
"today",
")",
"|",
"models",
".",
"Q",
"(",
"mandates__end_date__isnull",
"=",
"True",
")",
",",
"mandates__group__chamber__abbreviation",
"=",
"'EP'",
")",
"for",
"rep",
"in",
"qs",
":",
"changed",
"=",
"False",
"url",
"=",
"ep_url",
"%",
"rep",
".",
"remote_id",
"try",
":",
"site",
"=",
"WebSite",
".",
"objects",
".",
"get",
"(",
"representative",
"=",
"rep",
",",
"kind",
"=",
"'EP'",
")",
"except",
"WebSite",
".",
"DoesNotExist",
":",
"site",
"=",
"WebSite",
"(",
"representative",
"=",
"rep",
",",
"kind",
"=",
"'EP'",
",",
"url",
"=",
"url",
")",
"changed",
"=",
"True",
"if",
"site",
".",
"url",
"!=",
"url",
":",
"site",
".",
"url",
"=",
"url",
"changed",
"=",
"True",
"if",
"changed",
":",
"site",
".",
"save",
"(",
")",
"# AN/SEN",
"for",
"chamber",
"in",
"[",
"'AN'",
",",
"'SEN'",
"]",
":",
"qs",
"=",
"Representative",
".",
"objects",
".",
"filter",
"(",
"models",
".",
"Q",
"(",
"mandates__end_date__gte",
"=",
"today",
")",
"|",
"models",
".",
"Q",
"(",
"mandates__end_date__isnull",
"=",
"True",
")",
",",
"mandates__group__chamber__abbreviation",
"=",
"chamber",
")",
"for",
"rep",
"in",
"qs",
":",
"changed",
"=",
"False",
"url",
"=",
"rep",
".",
"remote_id",
"try",
":",
"site",
"=",
"WebSite",
".",
"objects",
".",
"get",
"(",
"representative",
"=",
"rep",
",",
"kind",
"=",
"chamber",
")",
"except",
"WebSite",
".",
"DoesNotExist",
":",
"site",
"=",
"WebSite",
"(",
"representative",
"=",
"rep",
",",
"kind",
"=",
"chamber",
",",
"url",
"=",
"url",
")",
"changed",
"=",
"True",
"if",
"site",
".",
"url",
"!=",
"url",
":",
"site",
".",
"url",
"=",
"url",
"changed",
"=",
"True",
"if",
"changed",
":",
"site",
".",
"save",
"(",
")"
] |
Prepare for remote_id removal by creating WebSite entities from it.
|
[
"Prepare",
"for",
"remote_id",
"removal",
"by",
"creating",
"WebSite",
"entities",
"from",
"it",
"."
] |
811c90d0250149e913e6196f0ab11c97d396be39
|
https://github.com/political-memory/django-representatives/blob/811c90d0250149e913e6196f0ab11c97d396be39/representatives/migrations/0020_rep_unique_slug_remove_remoteid.py#L22-L79
|
241,678
|
phenicle/cfgpy
|
cfgpy/tools.py
|
determine_file_extension_based_on_format
|
def determine_file_extension_based_on_format(format_specifier):
""" returns file extension string """
if format_specifier == FMT_INI:
return 'ini'
if format_specifier == FMT_DELIMITED:
return ''
if format_specifier == FMT_XML:
return 'xml'
if format_specifier == FMT_JSON:
return 'json'
if format_specifier == FMT_YAML:
return 'yml'
raise ValueError('invalid format specifier: {}'.format(format_specifier))
|
python
|
def determine_file_extension_based_on_format(format_specifier):
""" returns file extension string """
if format_specifier == FMT_INI:
return 'ini'
if format_specifier == FMT_DELIMITED:
return ''
if format_specifier == FMT_XML:
return 'xml'
if format_specifier == FMT_JSON:
return 'json'
if format_specifier == FMT_YAML:
return 'yml'
raise ValueError('invalid format specifier: {}'.format(format_specifier))
|
[
"def",
"determine_file_extension_based_on_format",
"(",
"format_specifier",
")",
":",
"if",
"format_specifier",
"==",
"FMT_INI",
":",
"return",
"'ini'",
"if",
"format_specifier",
"==",
"FMT_DELIMITED",
":",
"return",
"''",
"if",
"format_specifier",
"==",
"FMT_XML",
":",
"return",
"'xml'",
"if",
"format_specifier",
"==",
"FMT_JSON",
":",
"return",
"'json'",
"if",
"format_specifier",
"==",
"FMT_YAML",
":",
"return",
"'yml'",
"raise",
"ValueError",
"(",
"'invalid format specifier: {}'",
".",
"format",
"(",
"format_specifier",
")",
")"
] |
returns file extension string
|
[
"returns",
"file",
"extension",
"string"
] |
d53a9ad4c91cdcd774b59cf3b8c59096fdf29d20
|
https://github.com/phenicle/cfgpy/blob/d53a9ad4c91cdcd774b59cf3b8c59096fdf29d20/cfgpy/tools.py#L19-L31
|
241,679
|
phenicle/cfgpy
|
cfgpy/tools.py
|
CfgPy.read_element_using_argtuple
|
def read_element_using_argtuple(self, argtuple):
"""
takes a tuple of keys
returns node found in cfg_dict
found by traversing cfg_dict by successive
application of keys from element_path
"""
# doesn't support DELIMITED, only dict-based formats
if self.format == FMT_DELIMITED:
return None
node = self.cfg_dict
for key in argtuple:
node = node[key]
return node
|
python
|
def read_element_using_argtuple(self, argtuple):
"""
takes a tuple of keys
returns node found in cfg_dict
found by traversing cfg_dict by successive
application of keys from element_path
"""
# doesn't support DELIMITED, only dict-based formats
if self.format == FMT_DELIMITED:
return None
node = self.cfg_dict
for key in argtuple:
node = node[key]
return node
|
[
"def",
"read_element_using_argtuple",
"(",
"self",
",",
"argtuple",
")",
":",
"# doesn't support DELIMITED, only dict-based formats",
"if",
"self",
".",
"format",
"==",
"FMT_DELIMITED",
":",
"return",
"None",
"node",
"=",
"self",
".",
"cfg_dict",
"for",
"key",
"in",
"argtuple",
":",
"node",
"=",
"node",
"[",
"key",
"]",
"return",
"node"
] |
takes a tuple of keys
returns node found in cfg_dict
found by traversing cfg_dict by successive
application of keys from element_path
|
[
"takes",
"a",
"tuple",
"of",
"keys",
"returns",
"node",
"found",
"in",
"cfg_dict",
"found",
"by",
"traversing",
"cfg_dict",
"by",
"successive",
"application",
"of",
"keys",
"from",
"element_path"
] |
d53a9ad4c91cdcd774b59cf3b8c59096fdf29d20
|
https://github.com/phenicle/cfgpy/blob/d53a9ad4c91cdcd774b59cf3b8c59096fdf29d20/cfgpy/tools.py#L144-L159
|
241,680
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
Outline.create_arc
|
def create_arc(self, mace_type, name):
'''
Creates the story arc and initial tree for that arc
for the current outline. Returns the resulting Arc
instance.
'''
arc = Arc(mace_type=mace_type, outline=self, name=name)
arc.save()
milestone_count = arc.generate_template_arc_tree()
if milestone_count == 7: # pragma: no cover
arc.refresh_from_db()
return arc
else:
raise ArcIntegrityError('Something went wrong during arc template generation')
|
python
|
def create_arc(self, mace_type, name):
'''
Creates the story arc and initial tree for that arc
for the current outline. Returns the resulting Arc
instance.
'''
arc = Arc(mace_type=mace_type, outline=self, name=name)
arc.save()
milestone_count = arc.generate_template_arc_tree()
if milestone_count == 7: # pragma: no cover
arc.refresh_from_db()
return arc
else:
raise ArcIntegrityError('Something went wrong during arc template generation')
|
[
"def",
"create_arc",
"(",
"self",
",",
"mace_type",
",",
"name",
")",
":",
"arc",
"=",
"Arc",
"(",
"mace_type",
"=",
"mace_type",
",",
"outline",
"=",
"self",
",",
"name",
"=",
"name",
")",
"arc",
".",
"save",
"(",
")",
"milestone_count",
"=",
"arc",
".",
"generate_template_arc_tree",
"(",
")",
"if",
"milestone_count",
"==",
"7",
":",
"# pragma: no cover",
"arc",
".",
"refresh_from_db",
"(",
")",
"return",
"arc",
"else",
":",
"raise",
"ArcIntegrityError",
"(",
"'Something went wrong during arc template generation'",
")"
] |
Creates the story arc and initial tree for that arc
for the current outline. Returns the resulting Arc
instance.
|
[
"Creates",
"the",
"story",
"arc",
"and",
"initial",
"tree",
"for",
"that",
"arc",
"for",
"the",
"current",
"outline",
".",
"Returns",
"the",
"resulting",
"Arc",
"instance",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L410-L423
|
241,681
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
Arc.generate_template_arc_tree
|
def generate_template_arc_tree(self):
'''
Generate a seven point template in this arc. Arc must be empty.
'''
arc_root = self.arc_root_node
if not arc_root:
arc_root = ArcElementNode.add_root(
arc_element_type='root',
description='root of arc %s' % self.name,
arc=self
)
if arc_root.get_children():
raise ArcIntegrityError(_("This arc already has elements. You cannot build a template on top of it"))
for key, value in ARC_NODE_ELEMENT_DEFINITIONS.items():
if value['milestone']:
arc_root.add_child(arc_element_type=key, description=value['template_description'])
arc_root.refresh_from_db()
return ArcElementNode.objects.get(pk=arc_root.pk).get_children().count()
|
python
|
def generate_template_arc_tree(self):
'''
Generate a seven point template in this arc. Arc must be empty.
'''
arc_root = self.arc_root_node
if not arc_root:
arc_root = ArcElementNode.add_root(
arc_element_type='root',
description='root of arc %s' % self.name,
arc=self
)
if arc_root.get_children():
raise ArcIntegrityError(_("This arc already has elements. You cannot build a template on top of it"))
for key, value in ARC_NODE_ELEMENT_DEFINITIONS.items():
if value['milestone']:
arc_root.add_child(arc_element_type=key, description=value['template_description'])
arc_root.refresh_from_db()
return ArcElementNode.objects.get(pk=arc_root.pk).get_children().count()
|
[
"def",
"generate_template_arc_tree",
"(",
"self",
")",
":",
"arc_root",
"=",
"self",
".",
"arc_root_node",
"if",
"not",
"arc_root",
":",
"arc_root",
"=",
"ArcElementNode",
".",
"add_root",
"(",
"arc_element_type",
"=",
"'root'",
",",
"description",
"=",
"'root of arc %s'",
"%",
"self",
".",
"name",
",",
"arc",
"=",
"self",
")",
"if",
"arc_root",
".",
"get_children",
"(",
")",
":",
"raise",
"ArcIntegrityError",
"(",
"_",
"(",
"\"This arc already has elements. You cannot build a template on top of it\"",
")",
")",
"for",
"key",
",",
"value",
"in",
"ARC_NODE_ELEMENT_DEFINITIONS",
".",
"items",
"(",
")",
":",
"if",
"value",
"[",
"'milestone'",
"]",
":",
"arc_root",
".",
"add_child",
"(",
"arc_element_type",
"=",
"key",
",",
"description",
"=",
"value",
"[",
"'template_description'",
"]",
")",
"arc_root",
".",
"refresh_from_db",
"(",
")",
"return",
"ArcElementNode",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"arc_root",
".",
"pk",
")",
".",
"get_children",
"(",
")",
".",
"count",
"(",
")"
] |
Generate a seven point template in this arc. Arc must be empty.
|
[
"Generate",
"a",
"seven",
"point",
"template",
"in",
"this",
"arc",
".",
"Arc",
"must",
"be",
"empty",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L602-L619
|
241,682
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
Arc.fetch_arc_errors
|
def fetch_arc_errors(self):
'''
Evaluates the current tree of the arc and provides a list of errors that
the user should correct.
'''
error_list = []
hnode = self.validate_first_element()
if hnode:
error_list.append({'hook_error': hnode})
rnode = self.validate_last_element()
if rnode:
error_list.append({'reso_error': rnode})
try:
self.validate_generations()
except ArcGenerationError as ag:
error_list.append({'generation_error': str(ag)})
milecheck = self.validate_milestones()
if milecheck:
error_list.append({'mseq_error': milecheck})
return error_list
|
python
|
def fetch_arc_errors(self):
'''
Evaluates the current tree of the arc and provides a list of errors that
the user should correct.
'''
error_list = []
hnode = self.validate_first_element()
if hnode:
error_list.append({'hook_error': hnode})
rnode = self.validate_last_element()
if rnode:
error_list.append({'reso_error': rnode})
try:
self.validate_generations()
except ArcGenerationError as ag:
error_list.append({'generation_error': str(ag)})
milecheck = self.validate_milestones()
if milecheck:
error_list.append({'mseq_error': milecheck})
return error_list
|
[
"def",
"fetch_arc_errors",
"(",
"self",
")",
":",
"error_list",
"=",
"[",
"]",
"hnode",
"=",
"self",
".",
"validate_first_element",
"(",
")",
"if",
"hnode",
":",
"error_list",
".",
"append",
"(",
"{",
"'hook_error'",
":",
"hnode",
"}",
")",
"rnode",
"=",
"self",
".",
"validate_last_element",
"(",
")",
"if",
"rnode",
":",
"error_list",
".",
"append",
"(",
"{",
"'reso_error'",
":",
"rnode",
"}",
")",
"try",
":",
"self",
".",
"validate_generations",
"(",
")",
"except",
"ArcGenerationError",
"as",
"ag",
":",
"error_list",
".",
"append",
"(",
"{",
"'generation_error'",
":",
"str",
"(",
"ag",
")",
"}",
")",
"milecheck",
"=",
"self",
".",
"validate_milestones",
"(",
")",
"if",
"milecheck",
":",
"error_list",
".",
"append",
"(",
"{",
"'mseq_error'",
":",
"milecheck",
"}",
")",
"return",
"error_list"
] |
Evaluates the current tree of the arc and provides a list of errors that
the user should correct.
|
[
"Evaluates",
"the",
"current",
"tree",
"of",
"the",
"arc",
"and",
"provides",
"a",
"list",
"of",
"errors",
"that",
"the",
"user",
"should",
"correct",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L621-L640
|
241,683
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
Arc.validate_generations
|
def validate_generations(self):
'''
Make sure that the descendent depth is valid.
'''
nodes = self.arc_root_node.get_descendants()
for node in nodes:
logger.debug("Checking parent for node of type %s" % node.arc_element_type)
parent = ArcElementNode.objects.get(pk=node.pk).get_parent(update=True)
if 'mile' in node.arc_element_type and parent.get_depth() > 1:
logger.debug("Milestone node... with leaf parent")
raise ArcGenerationError(_("Milestones cannot be descendants of anything besides the root!"))
if (parent.get_depth() > 1 and
parent.arc_element_type not in ARC_NODE_ELEMENT_DEFINITIONS[node.arc_element_type]['allowed_parents']):
raise ArcGenerationError(_("Node %s cannot be a descendant of node %s" % (node, parent)))
return None
|
python
|
def validate_generations(self):
'''
Make sure that the descendent depth is valid.
'''
nodes = self.arc_root_node.get_descendants()
for node in nodes:
logger.debug("Checking parent for node of type %s" % node.arc_element_type)
parent = ArcElementNode.objects.get(pk=node.pk).get_parent(update=True)
if 'mile' in node.arc_element_type and parent.get_depth() > 1:
logger.debug("Milestone node... with leaf parent")
raise ArcGenerationError(_("Milestones cannot be descendants of anything besides the root!"))
if (parent.get_depth() > 1 and
parent.arc_element_type not in ARC_NODE_ELEMENT_DEFINITIONS[node.arc_element_type]['allowed_parents']):
raise ArcGenerationError(_("Node %s cannot be a descendant of node %s" % (node, parent)))
return None
|
[
"def",
"validate_generations",
"(",
"self",
")",
":",
"nodes",
"=",
"self",
".",
"arc_root_node",
".",
"get_descendants",
"(",
")",
"for",
"node",
"in",
"nodes",
":",
"logger",
".",
"debug",
"(",
"\"Checking parent for node of type %s\"",
"%",
"node",
".",
"arc_element_type",
")",
"parent",
"=",
"ArcElementNode",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"node",
".",
"pk",
")",
".",
"get_parent",
"(",
"update",
"=",
"True",
")",
"if",
"'mile'",
"in",
"node",
".",
"arc_element_type",
"and",
"parent",
".",
"get_depth",
"(",
")",
">",
"1",
":",
"logger",
".",
"debug",
"(",
"\"Milestone node... with leaf parent\"",
")",
"raise",
"ArcGenerationError",
"(",
"_",
"(",
"\"Milestones cannot be descendants of anything besides the root!\"",
")",
")",
"if",
"(",
"parent",
".",
"get_depth",
"(",
")",
">",
"1",
"and",
"parent",
".",
"arc_element_type",
"not",
"in",
"ARC_NODE_ELEMENT_DEFINITIONS",
"[",
"node",
".",
"arc_element_type",
"]",
"[",
"'allowed_parents'",
"]",
")",
":",
"raise",
"ArcGenerationError",
"(",
"_",
"(",
"\"Node %s cannot be a descendant of node %s\"",
"%",
"(",
"node",
",",
"parent",
")",
")",
")",
"return",
"None"
] |
Make sure that the descendent depth is valid.
|
[
"Make",
"sure",
"that",
"the",
"descendent",
"depth",
"is",
"valid",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L660-L674
|
241,684
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
Arc.validate_milestones
|
def validate_milestones(self):
'''
Reviews the arc element tree to ensure that milestones appear in the right
order.
'''
milestones = self.arc_root_node.get_children().filter(arc_element_type__contains='mile')
current_cursor = 0
for mile in milestones:
seq = mile.milestone_seq
if seq < current_cursor:
return mile
current_cursor = seq
return None
|
python
|
def validate_milestones(self):
'''
Reviews the arc element tree to ensure that milestones appear in the right
order.
'''
milestones = self.arc_root_node.get_children().filter(arc_element_type__contains='mile')
current_cursor = 0
for mile in milestones:
seq = mile.milestone_seq
if seq < current_cursor:
return mile
current_cursor = seq
return None
|
[
"def",
"validate_milestones",
"(",
"self",
")",
":",
"milestones",
"=",
"self",
".",
"arc_root_node",
".",
"get_children",
"(",
")",
".",
"filter",
"(",
"arc_element_type__contains",
"=",
"'mile'",
")",
"current_cursor",
"=",
"0",
"for",
"mile",
"in",
"milestones",
":",
"seq",
"=",
"mile",
".",
"milestone_seq",
"if",
"seq",
"<",
"current_cursor",
":",
"return",
"mile",
"current_cursor",
"=",
"seq",
"return",
"None"
] |
Reviews the arc element tree to ensure that milestones appear in the right
order.
|
[
"Reviews",
"the",
"arc",
"element",
"tree",
"to",
"ensure",
"that",
"milestones",
"appear",
"in",
"the",
"right",
"order",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L676-L688
|
241,685
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
StoryElementNode.all_characters
|
def all_characters(self):
'''
Returns a queryset of all characters associated with this node and its descendants,
excluding any duplicates.
'''
qs = self.assoc_characters.all()
for node in self.get_descendants():
qs2 = node.assoc_characters.all()
qs = qs.union(qs2).distinct('pk')
return qs
|
python
|
def all_characters(self):
'''
Returns a queryset of all characters associated with this node and its descendants,
excluding any duplicates.
'''
qs = self.assoc_characters.all()
for node in self.get_descendants():
qs2 = node.assoc_characters.all()
qs = qs.union(qs2).distinct('pk')
return qs
|
[
"def",
"all_characters",
"(",
"self",
")",
":",
"qs",
"=",
"self",
".",
"assoc_characters",
".",
"all",
"(",
")",
"for",
"node",
"in",
"self",
".",
"get_descendants",
"(",
")",
":",
"qs2",
"=",
"node",
".",
"assoc_characters",
".",
"all",
"(",
")",
"qs",
"=",
"qs",
".",
"union",
"(",
"qs2",
")",
".",
"distinct",
"(",
"'pk'",
")",
"return",
"qs"
] |
Returns a queryset of all characters associated with this node and its descendants,
excluding any duplicates.
|
[
"Returns",
"a",
"queryset",
"of",
"all",
"characters",
"associated",
"with",
"this",
"node",
"and",
"its",
"descendants",
"excluding",
"any",
"duplicates",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L817-L826
|
241,686
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
StoryElementNode.impact_rating
|
def impact_rating(self):
'''
Returns the impact rating for this node. Impact rating is a measure
of how powerful this moment in the story is by evaluting how many simultaneous
arc elements are associated with it. There is also a generational bleed element,
where the impact score creates shockwaves throughout their direct ancestor and
descendant nodes. This echo fades fast, but the bigger the impact, the farther
it goes.
Currently, the impact bleed does not extend to sibling nodes.
WARNING: Here be dragons.
'''
if self.depth == 1:
logger.debug('Root node. Skipping.')
return 0 # pragma: no cover
impact_bleed = {
'mile': 0.5, # A milestone extends it's influence by 50% per generation
'tf_beat': 0.25,
}
inherited_impact = 0
base_impact, add_impact, mile_impact = self._local_impact_rating()
local_impact = base_impact + add_impact + mile_impact
logger.debug("Local impact is %f" % local_impact)
parents = self.get_ancestors().filter(depth__gt=1)
children = self.get_descendants()
logger.debug('Found %d parents and %d children' % (parents.count(), children.count()))
for node in parents | children:
if node.depth == 1:
logger.debug("Skipping root node...")
else:
logger.debug('Checking a related node...')
b, a, m = node._local_impact_rating()
logger.debug('Related node has %f of additional impact and %f of milestone impact.' % (a, m))
if (a + m) > 0:
if node.depth > self.depth:
depth_diff = node.depth - self.depth
else:
depth_diff = self.depth - node.depth
logger.debug('There is a generational difference of %f. Adjusting impact bleed.' % depth_diff)
for x in range(depth_diff):
a = a * impact_bleed['tf_beat']
m = m * impact_bleed['mile']
logger.debug('Additional impact bleed of %f. Milestone impact bleed of %f' % (a, m))
inherited_impact += a + m
logger.debug('Final impact bleed of %f. Adding to inherited impact.' % inherited_impact)
else:
logger.debug('Node had 0 bleedworthy impact. Skipping...')
logger.debug('Inherited impact of %f. Adding to local impact of %f' % (inherited_impact, local_impact))
return local_impact + inherited_impact
|
python
|
def impact_rating(self):
'''
Returns the impact rating for this node. Impact rating is a measure
of how powerful this moment in the story is by evaluting how many simultaneous
arc elements are associated with it. There is also a generational bleed element,
where the impact score creates shockwaves throughout their direct ancestor and
descendant nodes. This echo fades fast, but the bigger the impact, the farther
it goes.
Currently, the impact bleed does not extend to sibling nodes.
WARNING: Here be dragons.
'''
if self.depth == 1:
logger.debug('Root node. Skipping.')
return 0 # pragma: no cover
impact_bleed = {
'mile': 0.5, # A milestone extends it's influence by 50% per generation
'tf_beat': 0.25,
}
inherited_impact = 0
base_impact, add_impact, mile_impact = self._local_impact_rating()
local_impact = base_impact + add_impact + mile_impact
logger.debug("Local impact is %f" % local_impact)
parents = self.get_ancestors().filter(depth__gt=1)
children = self.get_descendants()
logger.debug('Found %d parents and %d children' % (parents.count(), children.count()))
for node in parents | children:
if node.depth == 1:
logger.debug("Skipping root node...")
else:
logger.debug('Checking a related node...')
b, a, m = node._local_impact_rating()
logger.debug('Related node has %f of additional impact and %f of milestone impact.' % (a, m))
if (a + m) > 0:
if node.depth > self.depth:
depth_diff = node.depth - self.depth
else:
depth_diff = self.depth - node.depth
logger.debug('There is a generational difference of %f. Adjusting impact bleed.' % depth_diff)
for x in range(depth_diff):
a = a * impact_bleed['tf_beat']
m = m * impact_bleed['mile']
logger.debug('Additional impact bleed of %f. Milestone impact bleed of %f' % (a, m))
inherited_impact += a + m
logger.debug('Final impact bleed of %f. Adding to inherited impact.' % inherited_impact)
else:
logger.debug('Node had 0 bleedworthy impact. Skipping...')
logger.debug('Inherited impact of %f. Adding to local impact of %f' % (inherited_impact, local_impact))
return local_impact + inherited_impact
|
[
"def",
"impact_rating",
"(",
"self",
")",
":",
"if",
"self",
".",
"depth",
"==",
"1",
":",
"logger",
".",
"debug",
"(",
"'Root node. Skipping.'",
")",
"return",
"0",
"# pragma: no cover",
"impact_bleed",
"=",
"{",
"'mile'",
":",
"0.5",
",",
"# A milestone extends it's influence by 50% per generation",
"'tf_beat'",
":",
"0.25",
",",
"}",
"inherited_impact",
"=",
"0",
"base_impact",
",",
"add_impact",
",",
"mile_impact",
"=",
"self",
".",
"_local_impact_rating",
"(",
")",
"local_impact",
"=",
"base_impact",
"+",
"add_impact",
"+",
"mile_impact",
"logger",
".",
"debug",
"(",
"\"Local impact is %f\"",
"%",
"local_impact",
")",
"parents",
"=",
"self",
".",
"get_ancestors",
"(",
")",
".",
"filter",
"(",
"depth__gt",
"=",
"1",
")",
"children",
"=",
"self",
".",
"get_descendants",
"(",
")",
"logger",
".",
"debug",
"(",
"'Found %d parents and %d children'",
"%",
"(",
"parents",
".",
"count",
"(",
")",
",",
"children",
".",
"count",
"(",
")",
")",
")",
"for",
"node",
"in",
"parents",
"|",
"children",
":",
"if",
"node",
".",
"depth",
"==",
"1",
":",
"logger",
".",
"debug",
"(",
"\"Skipping root node...\"",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Checking a related node...'",
")",
"b",
",",
"a",
",",
"m",
"=",
"node",
".",
"_local_impact_rating",
"(",
")",
"logger",
".",
"debug",
"(",
"'Related node has %f of additional impact and %f of milestone impact.'",
"%",
"(",
"a",
",",
"m",
")",
")",
"if",
"(",
"a",
"+",
"m",
")",
">",
"0",
":",
"if",
"node",
".",
"depth",
">",
"self",
".",
"depth",
":",
"depth_diff",
"=",
"node",
".",
"depth",
"-",
"self",
".",
"depth",
"else",
":",
"depth_diff",
"=",
"self",
".",
"depth",
"-",
"node",
".",
"depth",
"logger",
".",
"debug",
"(",
"'There is a generational difference of %f. Adjusting impact bleed.'",
"%",
"depth_diff",
")",
"for",
"x",
"in",
"range",
"(",
"depth_diff",
")",
":",
"a",
"=",
"a",
"*",
"impact_bleed",
"[",
"'tf_beat'",
"]",
"m",
"=",
"m",
"*",
"impact_bleed",
"[",
"'mile'",
"]",
"logger",
".",
"debug",
"(",
"'Additional impact bleed of %f. Milestone impact bleed of %f'",
"%",
"(",
"a",
",",
"m",
")",
")",
"inherited_impact",
"+=",
"a",
"+",
"m",
"logger",
".",
"debug",
"(",
"'Final impact bleed of %f. Adding to inherited impact.'",
"%",
"inherited_impact",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Node had 0 bleedworthy impact. Skipping...'",
")",
"logger",
".",
"debug",
"(",
"'Inherited impact of %f. Adding to local impact of %f'",
"%",
"(",
"inherited_impact",
",",
"local_impact",
")",
")",
"return",
"local_impact",
"+",
"inherited_impact"
] |
Returns the impact rating for this node. Impact rating is a measure
of how powerful this moment in the story is by evaluting how many simultaneous
arc elements are associated with it. There is also a generational bleed element,
where the impact score creates shockwaves throughout their direct ancestor and
descendant nodes. This echo fades fast, but the bigger the impact, the farther
it goes.
Currently, the impact bleed does not extend to sibling nodes.
WARNING: Here be dragons.
|
[
"Returns",
"the",
"impact",
"rating",
"for",
"this",
"node",
".",
"Impact",
"rating",
"is",
"a",
"measure",
"of",
"how",
"powerful",
"this",
"moment",
"in",
"the",
"story",
"is",
"by",
"evaluting",
"how",
"many",
"simultaneous",
"arc",
"elements",
"are",
"associated",
"with",
"it",
".",
"There",
"is",
"also",
"a",
"generational",
"bleed",
"element",
"where",
"the",
"impact",
"score",
"creates",
"shockwaves",
"throughout",
"their",
"direct",
"ancestor",
"and",
"descendant",
"nodes",
".",
"This",
"echo",
"fades",
"fast",
"but",
"the",
"bigger",
"the",
"impact",
"the",
"farther",
"it",
"goes",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L829-L878
|
241,687
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
StoryElementNode.all_locations
|
def all_locations(self):
'''
Returns a queryset of all locations associated with this node and its descendants,
excluding any duplicates.
'''
qs = self.assoc_locations.all()
for node in self.get_descendants():
qs2 = node.assoc_locations.all()
qs = qs.union(qs2).distinct('pk')
return qs
|
python
|
def all_locations(self):
'''
Returns a queryset of all locations associated with this node and its descendants,
excluding any duplicates.
'''
qs = self.assoc_locations.all()
for node in self.get_descendants():
qs2 = node.assoc_locations.all()
qs = qs.union(qs2).distinct('pk')
return qs
|
[
"def",
"all_locations",
"(",
"self",
")",
":",
"qs",
"=",
"self",
".",
"assoc_locations",
".",
"all",
"(",
")",
"for",
"node",
"in",
"self",
".",
"get_descendants",
"(",
")",
":",
"qs2",
"=",
"node",
".",
"assoc_locations",
".",
"all",
"(",
")",
"qs",
"=",
"qs",
".",
"union",
"(",
"qs2",
")",
".",
"distinct",
"(",
"'pk'",
")",
"return",
"qs"
] |
Returns a queryset of all locations associated with this node and its descendants,
excluding any duplicates.
|
[
"Returns",
"a",
"queryset",
"of",
"all",
"locations",
"associated",
"with",
"this",
"node",
"and",
"its",
"descendants",
"excluding",
"any",
"duplicates",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L927-L936
|
241,688
|
maceoutliner/django-fiction-outlines
|
fiction_outlines/models.py
|
StoryElementNode.move
|
def move(self, target, pos=None):
'''
An override of the treebeard api in order to send a signal in advance.
'''
if self.outline != target.outline:
raise IntegrityError('Elements must be from the same outline!')
tree_manipulation.send(
sender=self.__class__,
instance=self,
action='move',
target_node_type=None,
target_node=target,
pos=pos
)
return super().move(target, pos)
|
python
|
def move(self, target, pos=None):
'''
An override of the treebeard api in order to send a signal in advance.
'''
if self.outline != target.outline:
raise IntegrityError('Elements must be from the same outline!')
tree_manipulation.send(
sender=self.__class__,
instance=self,
action='move',
target_node_type=None,
target_node=target,
pos=pos
)
return super().move(target, pos)
|
[
"def",
"move",
"(",
"self",
",",
"target",
",",
"pos",
"=",
"None",
")",
":",
"if",
"self",
".",
"outline",
"!=",
"target",
".",
"outline",
":",
"raise",
"IntegrityError",
"(",
"'Elements must be from the same outline!'",
")",
"tree_manipulation",
".",
"send",
"(",
"sender",
"=",
"self",
".",
"__class__",
",",
"instance",
"=",
"self",
",",
"action",
"=",
"'move'",
",",
"target_node_type",
"=",
"None",
",",
"target_node",
"=",
"target",
",",
"pos",
"=",
"pos",
")",
"return",
"super",
"(",
")",
".",
"move",
"(",
"target",
",",
"pos",
")"
] |
An override of the treebeard api in order to send a signal in advance.
|
[
"An",
"override",
"of",
"the",
"treebeard",
"api",
"in",
"order",
"to",
"send",
"a",
"signal",
"in",
"advance",
"."
] |
6c58e356af3fbe7b23557643ba27e46eaef9d4e3
|
https://github.com/maceoutliner/django-fiction-outlines/blob/6c58e356af3fbe7b23557643ba27e46eaef9d4e3/fiction_outlines/models.py#L938-L952
|
241,689
|
stefankoegl/bitlove-python
|
bitlove.py
|
BitloveClient.get_by_enclosures
|
def get_by_enclosures(self, enclosure_urls):
""" Get bitlove data for a list of enclosure URLs """
# prepare URLs
enclosure_urls = map(str.strip, enclosure_urls)
enclosure_urls = filter(None, enclosure_urls)
return BitloveResponse(self.opener, enclosure_urls)
|
python
|
def get_by_enclosures(self, enclosure_urls):
""" Get bitlove data for a list of enclosure URLs """
# prepare URLs
enclosure_urls = map(str.strip, enclosure_urls)
enclosure_urls = filter(None, enclosure_urls)
return BitloveResponse(self.opener, enclosure_urls)
|
[
"def",
"get_by_enclosures",
"(",
"self",
",",
"enclosure_urls",
")",
":",
"# prepare URLs",
"enclosure_urls",
"=",
"map",
"(",
"str",
".",
"strip",
",",
"enclosure_urls",
")",
"enclosure_urls",
"=",
"filter",
"(",
"None",
",",
"enclosure_urls",
")",
"return",
"BitloveResponse",
"(",
"self",
".",
"opener",
",",
"enclosure_urls",
")"
] |
Get bitlove data for a list of enclosure URLs
|
[
"Get",
"bitlove",
"data",
"for",
"a",
"list",
"of",
"enclosure",
"URLs"
] |
4ca4a3fe8d115782876f9e7ee5deac66119cc410
|
https://github.com/stefankoegl/bitlove-python/blob/4ca4a3fe8d115782876f9e7ee5deac66119cc410/bitlove.py#L71-L78
|
241,690
|
stefankoegl/bitlove-python
|
bitlove.py
|
BitloveResponse.get
|
def get(self, url):
""" Get the response for the given enclosure URL """
self._query()
return Enclosure(self._resp.get(url), url)
|
python
|
def get(self, url):
""" Get the response for the given enclosure URL """
self._query()
return Enclosure(self._resp.get(url), url)
|
[
"def",
"get",
"(",
"self",
",",
"url",
")",
":",
"self",
".",
"_query",
"(",
")",
"return",
"Enclosure",
"(",
"self",
".",
"_resp",
".",
"get",
"(",
"url",
")",
",",
"url",
")"
] |
Get the response for the given enclosure URL
|
[
"Get",
"the",
"response",
"for",
"the",
"given",
"enclosure",
"URL"
] |
4ca4a3fe8d115782876f9e7ee5deac66119cc410
|
https://github.com/stefankoegl/bitlove-python/blob/4ca4a3fe8d115782876f9e7ee5deac66119cc410/bitlove.py#L90-L93
|
241,691
|
stefankoegl/bitlove-python
|
bitlove.py
|
BitloveResponse._query
|
def _query(self):
""" perform a request to the API, only when necessary """
if not self.urls:
self._resp = {}
elif self._resp is None:
params = [ ('url', url) for url in self.urls]
query = urllib.urlencode(params)
# query API
r = self.opener.open(BITLOVE_ENCLOSURE_API + query)
self._resp = json.loads(r.read())
|
python
|
def _query(self):
""" perform a request to the API, only when necessary """
if not self.urls:
self._resp = {}
elif self._resp is None:
params = [ ('url', url) for url in self.urls]
query = urllib.urlencode(params)
# query API
r = self.opener.open(BITLOVE_ENCLOSURE_API + query)
self._resp = json.loads(r.read())
|
[
"def",
"_query",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"urls",
":",
"self",
".",
"_resp",
"=",
"{",
"}",
"elif",
"self",
".",
"_resp",
"is",
"None",
":",
"params",
"=",
"[",
"(",
"'url'",
",",
"url",
")",
"for",
"url",
"in",
"self",
".",
"urls",
"]",
"query",
"=",
"urllib",
".",
"urlencode",
"(",
"params",
")",
"# query API",
"r",
"=",
"self",
".",
"opener",
".",
"open",
"(",
"BITLOVE_ENCLOSURE_API",
"+",
"query",
")",
"self",
".",
"_resp",
"=",
"json",
".",
"loads",
"(",
"r",
".",
"read",
"(",
")",
")"
] |
perform a request to the API, only when necessary
|
[
"perform",
"a",
"request",
"to",
"the",
"API",
"only",
"when",
"necessary"
] |
4ca4a3fe8d115782876f9e7ee5deac66119cc410
|
https://github.com/stefankoegl/bitlove-python/blob/4ca4a3fe8d115782876f9e7ee5deac66119cc410/bitlove.py#L102-L114
|
241,692
|
iamFIREcracker/aadbook
|
aadbook/aadbook.py
|
_build_parser
|
def _build_parser():
"""
Return a command-line arguments parser.
"""
parser = argparse.ArgumentParser(description='Search you Azure AD contacts from mutt or the command-line.')
parser.add_argument('-c', '--config', help='Specify alternative configuration file.', metavar="FILE")
parser.add_argument('-v', '--verbose', dest="log_level", action='store_const',
const=logging.INFO, help='Be verbose about what is going on (stderr).')
parser.add_argument('-V', '--version',
action='version',
version='%%(prog)s %s' % pkg_resources.get_distribution("aadbook").version,
help="Print version and exit")
parser.add_argument('-d', '--debug', dest="log_level", action='store_const',
const=logging.DEBUG, help='Output debug info (stderr).')
parser.set_defaults(config=CONFIG_FILE, log_level=logging.ERROR)
subparsers = parser.add_subparsers()
parser_config_template = subparsers.add_parser('config-template',
description='Prints a template for .aadbookrc to stdout')
parser_config_template.set_defaults(func=do_config_template)
parser_reload = subparsers.add_parser('authenticate',
description='Azure AD authentication.')
parser_reload.set_defaults(func=do_authenticate)
parser_reload = subparsers.add_parser('reload',
description='Force reload of the cache.')
parser_reload.set_defaults(func=do_reload)
parser_query = subparsers.add_parser('query',
description='Search contacts using query (regex).')
parser_query.add_argument('query', help='regex to search for.', metavar='QUERY')
parser_query.set_defaults(func=do_query)
return parser
|
python
|
def _build_parser():
"""
Return a command-line arguments parser.
"""
parser = argparse.ArgumentParser(description='Search you Azure AD contacts from mutt or the command-line.')
parser.add_argument('-c', '--config', help='Specify alternative configuration file.', metavar="FILE")
parser.add_argument('-v', '--verbose', dest="log_level", action='store_const',
const=logging.INFO, help='Be verbose about what is going on (stderr).')
parser.add_argument('-V', '--version',
action='version',
version='%%(prog)s %s' % pkg_resources.get_distribution("aadbook").version,
help="Print version and exit")
parser.add_argument('-d', '--debug', dest="log_level", action='store_const',
const=logging.DEBUG, help='Output debug info (stderr).')
parser.set_defaults(config=CONFIG_FILE, log_level=logging.ERROR)
subparsers = parser.add_subparsers()
parser_config_template = subparsers.add_parser('config-template',
description='Prints a template for .aadbookrc to stdout')
parser_config_template.set_defaults(func=do_config_template)
parser_reload = subparsers.add_parser('authenticate',
description='Azure AD authentication.')
parser_reload.set_defaults(func=do_authenticate)
parser_reload = subparsers.add_parser('reload',
description='Force reload of the cache.')
parser_reload.set_defaults(func=do_reload)
parser_query = subparsers.add_parser('query',
description='Search contacts using query (regex).')
parser_query.add_argument('query', help='regex to search for.', metavar='QUERY')
parser_query.set_defaults(func=do_query)
return parser
|
[
"def",
"_build_parser",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'Search you Azure AD contacts from mutt or the command-line.'",
")",
"parser",
".",
"add_argument",
"(",
"'-c'",
",",
"'--config'",
",",
"help",
"=",
"'Specify alternative configuration file.'",
",",
"metavar",
"=",
"\"FILE\"",
")",
"parser",
".",
"add_argument",
"(",
"'-v'",
",",
"'--verbose'",
",",
"dest",
"=",
"\"log_level\"",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"logging",
".",
"INFO",
",",
"help",
"=",
"'Be verbose about what is going on (stderr).'",
")",
"parser",
".",
"add_argument",
"(",
"'-V'",
",",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"'%%(prog)s %s'",
"%",
"pkg_resources",
".",
"get_distribution",
"(",
"\"aadbook\"",
")",
".",
"version",
",",
"help",
"=",
"\"Print version and exit\"",
")",
"parser",
".",
"add_argument",
"(",
"'-d'",
",",
"'--debug'",
",",
"dest",
"=",
"\"log_level\"",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"logging",
".",
"DEBUG",
",",
"help",
"=",
"'Output debug info (stderr).'",
")",
"parser",
".",
"set_defaults",
"(",
"config",
"=",
"CONFIG_FILE",
",",
"log_level",
"=",
"logging",
".",
"ERROR",
")",
"subparsers",
"=",
"parser",
".",
"add_subparsers",
"(",
")",
"parser_config_template",
"=",
"subparsers",
".",
"add_parser",
"(",
"'config-template'",
",",
"description",
"=",
"'Prints a template for .aadbookrc to stdout'",
")",
"parser_config_template",
".",
"set_defaults",
"(",
"func",
"=",
"do_config_template",
")",
"parser_reload",
"=",
"subparsers",
".",
"add_parser",
"(",
"'authenticate'",
",",
"description",
"=",
"'Azure AD authentication.'",
")",
"parser_reload",
".",
"set_defaults",
"(",
"func",
"=",
"do_authenticate",
")",
"parser_reload",
"=",
"subparsers",
".",
"add_parser",
"(",
"'reload'",
",",
"description",
"=",
"'Force reload of the cache.'",
")",
"parser_reload",
".",
"set_defaults",
"(",
"func",
"=",
"do_reload",
")",
"parser_query",
"=",
"subparsers",
".",
"add_parser",
"(",
"'query'",
",",
"description",
"=",
"'Search contacts using query (regex).'",
")",
"parser_query",
".",
"add_argument",
"(",
"'query'",
",",
"help",
"=",
"'regex to search for.'",
",",
"metavar",
"=",
"'QUERY'",
")",
"parser_query",
".",
"set_defaults",
"(",
"func",
"=",
"do_query",
")",
"return",
"parser"
] |
Return a command-line arguments parser.
|
[
"Return",
"a",
"command",
"-",
"line",
"arguments",
"parser",
"."
] |
d191e9d36a2309449ab91c1728eaf5901b7ef91c
|
https://github.com/iamFIREcracker/aadbook/blob/d191e9d36a2309449ab91c1728eaf5901b7ef91c/aadbook/aadbook.py#L40-L75
|
241,693
|
krinj/k-util
|
setup.py
|
find_packages_under
|
def find_packages_under(path):
""" Recursive list all of the packages under a specific package."""
all_packages = setuptools.find_packages()
packages = []
for package in all_packages:
package_split = package.split(".")
if package_split[0] == path:
packages.append(package)
return packages
|
python
|
def find_packages_under(path):
""" Recursive list all of the packages under a specific package."""
all_packages = setuptools.find_packages()
packages = []
for package in all_packages:
package_split = package.split(".")
if package_split[0] == path:
packages.append(package)
return packages
|
[
"def",
"find_packages_under",
"(",
"path",
")",
":",
"all_packages",
"=",
"setuptools",
".",
"find_packages",
"(",
")",
"packages",
"=",
"[",
"]",
"for",
"package",
"in",
"all_packages",
":",
"package_split",
"=",
"package",
".",
"split",
"(",
"\".\"",
")",
"if",
"package_split",
"[",
"0",
"]",
"==",
"path",
":",
"packages",
".",
"append",
"(",
"package",
")",
"return",
"packages"
] |
Recursive list all of the packages under a specific package.
|
[
"Recursive",
"list",
"all",
"of",
"the",
"packages",
"under",
"a",
"specific",
"package",
"."
] |
b118826b1d6f49ca4e1ca7327d5b171db332ac23
|
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/setup.py#L25-L33
|
241,694
|
krinj/k-util
|
setup.py
|
copy_version_to_package
|
def copy_version_to_package(path):
""" Copy the single source of truth version number into the package as well. """
init_file = os.path.join(path, "__init__.py")
with open(init_file, "r") as original_file:
lines = original_file.readlines()
with open(init_file, "w") as new_file:
for line in lines:
if "__version__" not in line:
new_file.write(line)
else:
new_file.write("__version__ = \"{}\"\n".format(VERSION))
|
python
|
def copy_version_to_package(path):
""" Copy the single source of truth version number into the package as well. """
init_file = os.path.join(path, "__init__.py")
with open(init_file, "r") as original_file:
lines = original_file.readlines()
with open(init_file, "w") as new_file:
for line in lines:
if "__version__" not in line:
new_file.write(line)
else:
new_file.write("__version__ = \"{}\"\n".format(VERSION))
|
[
"def",
"copy_version_to_package",
"(",
"path",
")",
":",
"init_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"\"__init__.py\"",
")",
"with",
"open",
"(",
"init_file",
",",
"\"r\"",
")",
"as",
"original_file",
":",
"lines",
"=",
"original_file",
".",
"readlines",
"(",
")",
"with",
"open",
"(",
"init_file",
",",
"\"w\"",
")",
"as",
"new_file",
":",
"for",
"line",
"in",
"lines",
":",
"if",
"\"__version__\"",
"not",
"in",
"line",
":",
"new_file",
".",
"write",
"(",
"line",
")",
"else",
":",
"new_file",
".",
"write",
"(",
"\"__version__ = \\\"{}\\\"\\n\"",
".",
"format",
"(",
"VERSION",
")",
")"
] |
Copy the single source of truth version number into the package as well.
|
[
"Copy",
"the",
"single",
"source",
"of",
"truth",
"version",
"number",
"into",
"the",
"package",
"as",
"well",
"."
] |
b118826b1d6f49ca4e1ca7327d5b171db332ac23
|
https://github.com/krinj/k-util/blob/b118826b1d6f49ca4e1ca7327d5b171db332ac23/setup.py#L36-L47
|
241,695
|
privacee/freelan-configurator
|
freelan_configurator/freelan_cfg.py
|
FreelanCFG.validate
|
def validate(self):
"""Validation of configuration to check for required values"""
if not self.server.enabled:
if self.security.signature_certificate_file is self.security.defaults['signature_certificate_file']:
print("ISSUE: If you are not configuring a server, you need to set 'signature_certificate_file'")
if self.security.signature_private_key_file is self.security.defaults['signature_private_key_file']:
print("ISSUE: If you are not configuring a server, you need to set 'signature_private_key_file'")
else:
if self.client.enabled:
print("ISSUE: Client and server enabled at the same time?")
if self.server.protocol is self.server.defaults['protocol']:
if self.server.server_certificate_file is self.server.defaults['server_certificate_file'] or \
self.server.server_private_key_file is self.server.defaults['server_private_key_file']:
print("ISSUE: 'server_certificate_file' and/or 'server_private_key_file' are not configured and will be auto-generated.")
if self.server.certification_authority_certificate_file is self.server.defaults['certification_authority_certificate_file'] or \
self.server.certification_authority_private_key_file is self.server.defaults['certification_authority_private_key_file']:
print("ISSUE: 'certification_authority_certificate_file' and/or 'certification_authority_private_key_file' are not configured and will be auto-generated - this is NOT recommended.")
if self.server.authentication_script is self.server.defaults['authentication_script']:
print("ISSUE: No 'authentication_script' has been provided and all authentication requests will be rejected!")
if self.client.enabled:
if self.client.server_endpoint is self.client.defaults['server_endpoint']:
print("ISSUE: You are running in client mode, but you are using a default server address.")
if not self.client.disable_peer_verification is self.client.defaults['disable_peer_verification'] or \
not self.client.disable_host_verification is self.client.defaults['disable_host_verification']:
print("ISSUE: Disabling peer/host verification is NOT recommended - AT ALL.")
if self.client.username is self.client.defaults['username'] or \
self.client.password is self.client.defaults['password']:
print("ISSUE: No username and/or password has been configured for a client.")
if self.fscp.contact is self.fscp.defaults['contact']:
if not self.server.enabled and not self.client.enabled:
print("ISSUE: You have not defined any contact points while you are neither running as server nor client.")
## hostname_resolution_protocol=ipv4/ipv6
## ipv4_address_prefix_length=9.0.0.1/24
## ipv6_address_prefix_length=2aa1::1/8
if self.security.authority_certificate_file is self.security.defaults['authority_certificate_file']:
print("ISSUE: You need to set 'authority_certificate_file'")
if self.tap_adapter.ipv4_address_prefix_length is self.tap_adapter.defaults['ipv4_address_prefix_length']:
print("ISSUE: You are using the default network address - make sure you set a different ip for every machine 'ipv4_address_prefix_length'")
|
python
|
def validate(self):
"""Validation of configuration to check for required values"""
if not self.server.enabled:
if self.security.signature_certificate_file is self.security.defaults['signature_certificate_file']:
print("ISSUE: If you are not configuring a server, you need to set 'signature_certificate_file'")
if self.security.signature_private_key_file is self.security.defaults['signature_private_key_file']:
print("ISSUE: If you are not configuring a server, you need to set 'signature_private_key_file'")
else:
if self.client.enabled:
print("ISSUE: Client and server enabled at the same time?")
if self.server.protocol is self.server.defaults['protocol']:
if self.server.server_certificate_file is self.server.defaults['server_certificate_file'] or \
self.server.server_private_key_file is self.server.defaults['server_private_key_file']:
print("ISSUE: 'server_certificate_file' and/or 'server_private_key_file' are not configured and will be auto-generated.")
if self.server.certification_authority_certificate_file is self.server.defaults['certification_authority_certificate_file'] or \
self.server.certification_authority_private_key_file is self.server.defaults['certification_authority_private_key_file']:
print("ISSUE: 'certification_authority_certificate_file' and/or 'certification_authority_private_key_file' are not configured and will be auto-generated - this is NOT recommended.")
if self.server.authentication_script is self.server.defaults['authentication_script']:
print("ISSUE: No 'authentication_script' has been provided and all authentication requests will be rejected!")
if self.client.enabled:
if self.client.server_endpoint is self.client.defaults['server_endpoint']:
print("ISSUE: You are running in client mode, but you are using a default server address.")
if not self.client.disable_peer_verification is self.client.defaults['disable_peer_verification'] or \
not self.client.disable_host_verification is self.client.defaults['disable_host_verification']:
print("ISSUE: Disabling peer/host verification is NOT recommended - AT ALL.")
if self.client.username is self.client.defaults['username'] or \
self.client.password is self.client.defaults['password']:
print("ISSUE: No username and/or password has been configured for a client.")
if self.fscp.contact is self.fscp.defaults['contact']:
if not self.server.enabled and not self.client.enabled:
print("ISSUE: You have not defined any contact points while you are neither running as server nor client.")
## hostname_resolution_protocol=ipv4/ipv6
## ipv4_address_prefix_length=9.0.0.1/24
## ipv6_address_prefix_length=2aa1::1/8
if self.security.authority_certificate_file is self.security.defaults['authority_certificate_file']:
print("ISSUE: You need to set 'authority_certificate_file'")
if self.tap_adapter.ipv4_address_prefix_length is self.tap_adapter.defaults['ipv4_address_prefix_length']:
print("ISSUE: You are using the default network address - make sure you set a different ip for every machine 'ipv4_address_prefix_length'")
|
[
"def",
"validate",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"server",
".",
"enabled",
":",
"if",
"self",
".",
"security",
".",
"signature_certificate_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'signature_certificate_file'",
"]",
":",
"print",
"(",
"\"ISSUE: If you are not configuring a server, you need to set 'signature_certificate_file'\"",
")",
"if",
"self",
".",
"security",
".",
"signature_private_key_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'signature_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: If you are not configuring a server, you need to set 'signature_private_key_file'\"",
")",
"else",
":",
"if",
"self",
".",
"client",
".",
"enabled",
":",
"print",
"(",
"\"ISSUE: Client and server enabled at the same time?\"",
")",
"if",
"self",
".",
"server",
".",
"protocol",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'protocol'",
"]",
":",
"if",
"self",
".",
"server",
".",
"server_certificate_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'server_certificate_file'",
"]",
"or",
"self",
".",
"server",
".",
"server_private_key_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'server_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: 'server_certificate_file' and/or 'server_private_key_file' are not configured and will be auto-generated.\"",
")",
"if",
"self",
".",
"server",
".",
"certification_authority_certificate_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'certification_authority_certificate_file'",
"]",
"or",
"self",
".",
"server",
".",
"certification_authority_private_key_file",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'certification_authority_private_key_file'",
"]",
":",
"print",
"(",
"\"ISSUE: 'certification_authority_certificate_file' and/or 'certification_authority_private_key_file' are not configured and will be auto-generated - this is NOT recommended.\"",
")",
"if",
"self",
".",
"server",
".",
"authentication_script",
"is",
"self",
".",
"server",
".",
"defaults",
"[",
"'authentication_script'",
"]",
":",
"print",
"(",
"\"ISSUE: No 'authentication_script' has been provided and all authentication requests will be rejected!\"",
")",
"if",
"self",
".",
"client",
".",
"enabled",
":",
"if",
"self",
".",
"client",
".",
"server_endpoint",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'server_endpoint'",
"]",
":",
"print",
"(",
"\"ISSUE: You are running in client mode, but you are using a default server address.\"",
")",
"if",
"not",
"self",
".",
"client",
".",
"disable_peer_verification",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'disable_peer_verification'",
"]",
"or",
"not",
"self",
".",
"client",
".",
"disable_host_verification",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'disable_host_verification'",
"]",
":",
"print",
"(",
"\"ISSUE: Disabling peer/host verification is NOT recommended - AT ALL.\"",
")",
"if",
"self",
".",
"client",
".",
"username",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'username'",
"]",
"or",
"self",
".",
"client",
".",
"password",
"is",
"self",
".",
"client",
".",
"defaults",
"[",
"'password'",
"]",
":",
"print",
"(",
"\"ISSUE: No username and/or password has been configured for a client.\"",
")",
"if",
"self",
".",
"fscp",
".",
"contact",
"is",
"self",
".",
"fscp",
".",
"defaults",
"[",
"'contact'",
"]",
":",
"if",
"not",
"self",
".",
"server",
".",
"enabled",
"and",
"not",
"self",
".",
"client",
".",
"enabled",
":",
"print",
"(",
"\"ISSUE: You have not defined any contact points while you are neither running as server nor client.\"",
")",
"## hostname_resolution_protocol=ipv4/ipv6",
"## ipv4_address_prefix_length=9.0.0.1/24",
"## ipv6_address_prefix_length=2aa1::1/8",
"if",
"self",
".",
"security",
".",
"authority_certificate_file",
"is",
"self",
".",
"security",
".",
"defaults",
"[",
"'authority_certificate_file'",
"]",
":",
"print",
"(",
"\"ISSUE: You need to set 'authority_certificate_file'\"",
")",
"if",
"self",
".",
"tap_adapter",
".",
"ipv4_address_prefix_length",
"is",
"self",
".",
"tap_adapter",
".",
"defaults",
"[",
"'ipv4_address_prefix_length'",
"]",
":",
"print",
"(",
"\"ISSUE: You are using the default network address - make sure you set a different ip for every machine 'ipv4_address_prefix_length'\"",
")"
] |
Validation of configuration to check for required values
|
[
"Validation",
"of",
"configuration",
"to",
"check",
"for",
"required",
"values"
] |
7c070f8958454792f870ef0d195a7f5da36edb5a
|
https://github.com/privacee/freelan-configurator/blob/7c070f8958454792f870ef0d195a7f5da36edb5a/freelan_configurator/freelan_cfg.py#L192-L247
|
241,696
|
inveniosoftware-attic/invenio-knowledge
|
invenio_knowledge/admin.py
|
register_admin
|
def register_admin(app, admin):
"""Called on app initialization to register administration interface."""
category = 'Knowledge'
admin.category_icon_classes[category] = "fa fa-mortar-board"
admin.add_view(
KnowledgeAdmin(app, KnwKB, db.session,
name='Knowledge Base', category=category,
endpoint="kb")
)
admin.add_view(
KnwKBRVALAdmin(app, KnwKBRVAL, db.session,
name="Knowledge Mappings", category=category,
endpoint="kbrval")
)
|
python
|
def register_admin(app, admin):
"""Called on app initialization to register administration interface."""
category = 'Knowledge'
admin.category_icon_classes[category] = "fa fa-mortar-board"
admin.add_view(
KnowledgeAdmin(app, KnwKB, db.session,
name='Knowledge Base', category=category,
endpoint="kb")
)
admin.add_view(
KnwKBRVALAdmin(app, KnwKBRVAL, db.session,
name="Knowledge Mappings", category=category,
endpoint="kbrval")
)
|
[
"def",
"register_admin",
"(",
"app",
",",
"admin",
")",
":",
"category",
"=",
"'Knowledge'",
"admin",
".",
"category_icon_classes",
"[",
"category",
"]",
"=",
"\"fa fa-mortar-board\"",
"admin",
".",
"add_view",
"(",
"KnowledgeAdmin",
"(",
"app",
",",
"KnwKB",
",",
"db",
".",
"session",
",",
"name",
"=",
"'Knowledge Base'",
",",
"category",
"=",
"category",
",",
"endpoint",
"=",
"\"kb\"",
")",
")",
"admin",
".",
"add_view",
"(",
"KnwKBRVALAdmin",
"(",
"app",
",",
"KnwKBRVAL",
",",
"db",
".",
"session",
",",
"name",
"=",
"\"Knowledge Mappings\"",
",",
"category",
"=",
"category",
",",
"endpoint",
"=",
"\"kbrval\"",
")",
")"
] |
Called on app initialization to register administration interface.
|
[
"Called",
"on",
"app",
"initialization",
"to",
"register",
"administration",
"interface",
"."
] |
b31722dc14243ca8f626f8b3bce9718d0119de55
|
https://github.com/inveniosoftware-attic/invenio-knowledge/blob/b31722dc14243ca8f626f8b3bce9718d0119de55/invenio_knowledge/admin.py#L188-L201
|
241,697
|
inveniosoftware-attic/invenio-knowledge
|
invenio_knowledge/admin.py
|
KnowledgeAdmin.after_model_change
|
def after_model_change(self, form, model, is_created):
"""Save model."""
super(KnowledgeAdmin, self).after_model_change(form, model, is_created)
from invenio_collections.models import Collection
if form.kbtype.data == KnwKB.KNWKB_TYPES['dynamic']:
id_collection = form.id_collection.data or None
collection = Collection.query.filter_by(
id=id_collection).one() if id_collection else None
model.set_dyn_config(
field=form.output_tag.data,
expression=form.search_expression.data,
collection=collection)
if form.kbtype.data == KnwKB.KNWKB_TYPES['taxonomy']:
if form.tfile.data:
file_name = model.get_filename()
file_data = request.files[form.tfile.name].read()
with open(file_name, 'w') as f:
f.write(file_data)
|
python
|
def after_model_change(self, form, model, is_created):
"""Save model."""
super(KnowledgeAdmin, self).after_model_change(form, model, is_created)
from invenio_collections.models import Collection
if form.kbtype.data == KnwKB.KNWKB_TYPES['dynamic']:
id_collection = form.id_collection.data or None
collection = Collection.query.filter_by(
id=id_collection).one() if id_collection else None
model.set_dyn_config(
field=form.output_tag.data,
expression=form.search_expression.data,
collection=collection)
if form.kbtype.data == KnwKB.KNWKB_TYPES['taxonomy']:
if form.tfile.data:
file_name = model.get_filename()
file_data = request.files[form.tfile.name].read()
with open(file_name, 'w') as f:
f.write(file_data)
|
[
"def",
"after_model_change",
"(",
"self",
",",
"form",
",",
"model",
",",
"is_created",
")",
":",
"super",
"(",
"KnowledgeAdmin",
",",
"self",
")",
".",
"after_model_change",
"(",
"form",
",",
"model",
",",
"is_created",
")",
"from",
"invenio_collections",
".",
"models",
"import",
"Collection",
"if",
"form",
".",
"kbtype",
".",
"data",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'dynamic'",
"]",
":",
"id_collection",
"=",
"form",
".",
"id_collection",
".",
"data",
"or",
"None",
"collection",
"=",
"Collection",
".",
"query",
".",
"filter_by",
"(",
"id",
"=",
"id_collection",
")",
".",
"one",
"(",
")",
"if",
"id_collection",
"else",
"None",
"model",
".",
"set_dyn_config",
"(",
"field",
"=",
"form",
".",
"output_tag",
".",
"data",
",",
"expression",
"=",
"form",
".",
"search_expression",
".",
"data",
",",
"collection",
"=",
"collection",
")",
"if",
"form",
".",
"kbtype",
".",
"data",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'taxonomy'",
"]",
":",
"if",
"form",
".",
"tfile",
".",
"data",
":",
"file_name",
"=",
"model",
".",
"get_filename",
"(",
")",
"file_data",
"=",
"request",
".",
"files",
"[",
"form",
".",
"tfile",
".",
"name",
"]",
".",
"read",
"(",
")",
"with",
"open",
"(",
"file_name",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"file_data",
")"
] |
Save model.
|
[
"Save",
"model",
"."
] |
b31722dc14243ca8f626f8b3bce9718d0119de55
|
https://github.com/inveniosoftware-attic/invenio-knowledge/blob/b31722dc14243ca8f626f8b3bce9718d0119de55/invenio_knowledge/admin.py#L104-L125
|
241,698
|
inveniosoftware-attic/invenio-knowledge
|
invenio_knowledge/admin.py
|
KnowledgeAdmin.edit_form
|
def edit_form(self, obj=None):
"""Edit form."""
kbtype = request.args['kbtype'] if 'kbtype' in request.args else 'w'
if kbtype == KnwKB.KNWKB_TYPES['written_as']:
self.form = WrittenAsKnowledgeForm
elif kbtype == KnwKB.KNWKB_TYPES['dynamic']:
self.form = DynamicKnowledgeForm
else:
self.form = TaxonomyKnowledgeForm
form = self.form(obj=obj)
if not form.is_submitted():
# load extra data: obj => form
if kbtype == KnwKB.KNWKB_TYPES['dynamic']:
if obj.kbdefs:
form.id_collection.data = obj.kbdefs.id_collection
form.output_tag.data = obj.kbdefs.output_tag
form.search_expression.data = obj.kbdefs.search_expression
if kbtype == KnwKB.KNWKB_TYPES['taxonomy']:
file_name = obj.get_filename()
if os.path.isfile(file_name):
form.tfile.label.text = form.tfile.label.text + " *"
# TODO add the possibility to download the file
form.tfile.description = _("Already uploaded %(name)s",
name=obj.get_filename())
form.kbtype.data = kbtype
return form
|
python
|
def edit_form(self, obj=None):
"""Edit form."""
kbtype = request.args['kbtype'] if 'kbtype' in request.args else 'w'
if kbtype == KnwKB.KNWKB_TYPES['written_as']:
self.form = WrittenAsKnowledgeForm
elif kbtype == KnwKB.KNWKB_TYPES['dynamic']:
self.form = DynamicKnowledgeForm
else:
self.form = TaxonomyKnowledgeForm
form = self.form(obj=obj)
if not form.is_submitted():
# load extra data: obj => form
if kbtype == KnwKB.KNWKB_TYPES['dynamic']:
if obj.kbdefs:
form.id_collection.data = obj.kbdefs.id_collection
form.output_tag.data = obj.kbdefs.output_tag
form.search_expression.data = obj.kbdefs.search_expression
if kbtype == KnwKB.KNWKB_TYPES['taxonomy']:
file_name = obj.get_filename()
if os.path.isfile(file_name):
form.tfile.label.text = form.tfile.label.text + " *"
# TODO add the possibility to download the file
form.tfile.description = _("Already uploaded %(name)s",
name=obj.get_filename())
form.kbtype.data = kbtype
return form
|
[
"def",
"edit_form",
"(",
"self",
",",
"obj",
"=",
"None",
")",
":",
"kbtype",
"=",
"request",
".",
"args",
"[",
"'kbtype'",
"]",
"if",
"'kbtype'",
"in",
"request",
".",
"args",
"else",
"'w'",
"if",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'written_as'",
"]",
":",
"self",
".",
"form",
"=",
"WrittenAsKnowledgeForm",
"elif",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'dynamic'",
"]",
":",
"self",
".",
"form",
"=",
"DynamicKnowledgeForm",
"else",
":",
"self",
".",
"form",
"=",
"TaxonomyKnowledgeForm",
"form",
"=",
"self",
".",
"form",
"(",
"obj",
"=",
"obj",
")",
"if",
"not",
"form",
".",
"is_submitted",
"(",
")",
":",
"# load extra data: obj => form",
"if",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'dynamic'",
"]",
":",
"if",
"obj",
".",
"kbdefs",
":",
"form",
".",
"id_collection",
".",
"data",
"=",
"obj",
".",
"kbdefs",
".",
"id_collection",
"form",
".",
"output_tag",
".",
"data",
"=",
"obj",
".",
"kbdefs",
".",
"output_tag",
"form",
".",
"search_expression",
".",
"data",
"=",
"obj",
".",
"kbdefs",
".",
"search_expression",
"if",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'taxonomy'",
"]",
":",
"file_name",
"=",
"obj",
".",
"get_filename",
"(",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"file_name",
")",
":",
"form",
".",
"tfile",
".",
"label",
".",
"text",
"=",
"form",
".",
"tfile",
".",
"label",
".",
"text",
"+",
"\" *\"",
"# TODO add the possibility to download the file",
"form",
".",
"tfile",
".",
"description",
"=",
"_",
"(",
"\"Already uploaded %(name)s\"",
",",
"name",
"=",
"obj",
".",
"get_filename",
"(",
")",
")",
"form",
".",
"kbtype",
".",
"data",
"=",
"kbtype",
"return",
"form"
] |
Edit form.
|
[
"Edit",
"form",
"."
] |
b31722dc14243ca8f626f8b3bce9718d0119de55
|
https://github.com/inveniosoftware-attic/invenio-knowledge/blob/b31722dc14243ca8f626f8b3bce9718d0119de55/invenio_knowledge/admin.py#L127-L158
|
241,699
|
inveniosoftware-attic/invenio-knowledge
|
invenio_knowledge/admin.py
|
KnowledgeAdmin.create_form
|
def create_form(self, obj=None):
"""Create form."""
kbtype = request.args['kbtype'] if 'kbtype' in request.args else 'w'
if kbtype == KnwKB.KNWKB_TYPES['written_as']:
self.form = WrittenAsKnowledgeForm
elif kbtype == KnwKB.KNWKB_TYPES['dynamic']:
self.form = DynamicKnowledgeForm
else:
self.form = TaxonomyKnowledgeForm
form = self.form()
form.kbtype.data = kbtype
return form
|
python
|
def create_form(self, obj=None):
"""Create form."""
kbtype = request.args['kbtype'] if 'kbtype' in request.args else 'w'
if kbtype == KnwKB.KNWKB_TYPES['written_as']:
self.form = WrittenAsKnowledgeForm
elif kbtype == KnwKB.KNWKB_TYPES['dynamic']:
self.form = DynamicKnowledgeForm
else:
self.form = TaxonomyKnowledgeForm
form = self.form()
form.kbtype.data = kbtype
return form
|
[
"def",
"create_form",
"(",
"self",
",",
"obj",
"=",
"None",
")",
":",
"kbtype",
"=",
"request",
".",
"args",
"[",
"'kbtype'",
"]",
"if",
"'kbtype'",
"in",
"request",
".",
"args",
"else",
"'w'",
"if",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'written_as'",
"]",
":",
"self",
".",
"form",
"=",
"WrittenAsKnowledgeForm",
"elif",
"kbtype",
"==",
"KnwKB",
".",
"KNWKB_TYPES",
"[",
"'dynamic'",
"]",
":",
"self",
".",
"form",
"=",
"DynamicKnowledgeForm",
"else",
":",
"self",
".",
"form",
"=",
"TaxonomyKnowledgeForm",
"form",
"=",
"self",
".",
"form",
"(",
")",
"form",
".",
"kbtype",
".",
"data",
"=",
"kbtype",
"return",
"form"
] |
Create form.
|
[
"Create",
"form",
"."
] |
b31722dc14243ca8f626f8b3bce9718d0119de55
|
https://github.com/inveniosoftware-attic/invenio-knowledge/blob/b31722dc14243ca8f626f8b3bce9718d0119de55/invenio_knowledge/admin.py#L160-L174
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.