| id (int32, values 0–252k) | repo (string, length 7–55) | path (string, length 4–127) | func_name (string, length 1–88) | original_string (string, length 75–19.8k) | language (1 class) | code (string, length 75–19.8k) | code_tokens (list) | docstring (string, length 3–17.3k) | docstring_tokens (list) | sha (string, length 40) | url (string, length 87–242) |
|---|---|---|---|---|---|---|---|---|---|---|---|
239,400
|
willkg/phil
|
phil/util.py
|
out
|
def out(*output, **kwargs):
"""Writes output to stdout.
:arg wrap: If you set ``wrap=False``, then ``out`` won't textwrap
the output.
"""
output = ' '.join([str(o) for o in output])
if kwargs.get('wrap') is not False:
output = '\n'.join(wrap(output, kwargs.get('indent', '')))
elif kwargs.get('indent'):
indent = kwargs['indent']
output = indent + ('\n' + indent).join(output.splitlines())
sys.stdout.write(output + '\n')
|
python
|
def out(*output, **kwargs):
"""Writes output to stdout.
:arg wrap: If you set ``wrap=False``, then ``out`` won't textwrap
the output.
"""
output = ' '.join([str(o) for o in output])
if kwargs.get('wrap') is not False:
output = '\n'.join(wrap(output, kwargs.get('indent', '')))
elif kwargs.get('indent'):
indent = kwargs['indent']
output = indent + ('\n' + indent).join(output.splitlines())
sys.stdout.write(output + '\n')
|
[
"def",
"out",
"(",
"*",
"output",
",",
"*",
"*",
"kwargs",
")",
":",
"output",
"=",
"' '",
".",
"join",
"(",
"[",
"str",
"(",
"o",
")",
"for",
"o",
"in",
"output",
"]",
")",
"if",
"kwargs",
".",
"get",
"(",
"'wrap'",
")",
"is",
"not",
"False",
":",
"output",
"=",
"'\\n'",
".",
"join",
"(",
"wrap",
"(",
"output",
",",
"kwargs",
".",
"get",
"(",
"'indent'",
",",
"''",
")",
")",
")",
"elif",
"kwargs",
".",
"get",
"(",
"'indent'",
")",
":",
"indent",
"=",
"kwargs",
"[",
"'indent'",
"]",
"output",
"=",
"indent",
"+",
"(",
"'\\n'",
"+",
"indent",
")",
".",
"join",
"(",
"output",
".",
"splitlines",
"(",
")",
")",
"sys",
".",
"stdout",
".",
"write",
"(",
"output",
"+",
"'\\n'",
")"
] |
Writes output to stdout.
:arg wrap: If you set ``wrap=False``, then ``out`` won't textwrap
the output.
|
[
"Writes",
"output",
"to",
"stdout",
"."
] |
cb7ed0199cca2c405af9d6d209ddbf739a437e9c
|
https://github.com/willkg/phil/blob/cb7ed0199cca2c405af9d6d209ddbf739a437e9c/phil/util.py#L93-L106
|
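The `out` helper in the row above depends on a `wrap` function defined elsewhere in `phil/util.py`, which this row does not show. The sketch below is a minimal, self-contained way to exercise the same call pattern; the `wrap` stand-in built on `textwrap` is an assumption for illustration, not phil's actual implementation.

```python
import sys
import textwrap

def wrap(text, indent=''):
    # Assumed stand-in for phil's wrap(): indented, width-limited lines.
    return [indent + line for line in textwrap.wrap(text, width=79 - len(indent))]

def out(*output, **kwargs):
    # Body copied from the record above.
    output = ' '.join([str(o) for o in output])
    if kwargs.get('wrap') is not False:
        output = '\n'.join(wrap(output, kwargs.get('indent', '')))
    elif kwargs.get('indent'):
        indent = kwargs['indent']
        output = indent + ('\n' + indent).join(output.splitlines())
    sys.stdout.write(output + '\n')

out('hello', 'world')                                # wrapped to the terminal width
out('line one\nline two', wrap=False, indent='  ')   # indented verbatim, no re-wrapping
```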
239,401
|
willkg/phil
|
phil/util.py
|
convert_rrule
|
def convert_rrule(rrule):
"""Converts icalendar rrule to dateutil rrule."""
args = {}
# TODO: rrule['freq'] is a list, but I'm unclear as to why.
freq = FREQ_MAP[rrule['freq'][0]]
keys = ['wkst', 'until', 'bysetpos', 'interval',
'bymonth', 'bymonthday', 'byyearday', 'byweekno',
'byhour', 'byminute', 'bysecond']
def tweak(rrule, key):
value = rrule.get(key)
if isinstance(value, list):
return value[0]
return value
args = dict((key, tweak(rrule, key)) for key in keys if rrule.get(key))
byweekday = rrule.get('byweekday')
if byweekday:
byweekday = byweekday[0]
count, day = int(byweekday[0]), byweekday[1:]
args['byweekday'] = WEEKDAY_MAP[day](count)
return freq, args
|
python
|
def convert_rrule(rrule):
"""Converts icalendar rrule to dateutil rrule."""
args = {}
# TODO: rrule['freq'] is a list, but I'm unclear as to why.
freq = FREQ_MAP[rrule['freq'][0]]
keys = ['wkst', 'until', 'bysetpos', 'interval',
'bymonth', 'bymonthday', 'byyearday', 'byweekno',
'byhour', 'byminute', 'bysecond']
def tweak(rrule, key):
value = rrule.get(key)
if isinstance(value, list):
return value[0]
return value
args = dict((key, tweak(rrule, key)) for key in keys if rrule.get(key))
byweekday = rrule.get('byweekday')
if byweekday:
byweekday = byweekday[0]
count, day = int(byweekday[0]), byweekday[1:]
args['byweekday'] = WEEKDAY_MAP[day](count)
return freq, args
|
[
"def",
"convert_rrule",
"(",
"rrule",
")",
":",
"args",
"=",
"{",
"}",
"# TODO: rrule['freq'] is a list, but I'm unclear as to why.",
"freq",
"=",
"FREQ_MAP",
"[",
"rrule",
"[",
"'freq'",
"]",
"[",
"0",
"]",
"]",
"keys",
"=",
"[",
"'wkst'",
",",
"'until'",
",",
"'bysetpos'",
",",
"'interval'",
",",
"'bymonth'",
",",
"'bymonthday'",
",",
"'byyearday'",
",",
"'byweekno'",
",",
"'byhour'",
",",
"'byminute'",
",",
"'bysecond'",
"]",
"def",
"tweak",
"(",
"rrule",
",",
"key",
")",
":",
"value",
"=",
"rrule",
".",
"get",
"(",
"key",
")",
"if",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"return",
"value",
"[",
"0",
"]",
"return",
"value",
"args",
"=",
"dict",
"(",
"(",
"key",
",",
"tweak",
"(",
"rrule",
",",
"key",
")",
")",
"for",
"key",
"in",
"keys",
"if",
"rrule",
".",
"get",
"(",
"key",
")",
")",
"byweekday",
"=",
"rrule",
".",
"get",
"(",
"'byweekday'",
")",
"if",
"byweekday",
":",
"byweekday",
"=",
"byweekday",
"[",
"0",
"]",
"count",
",",
"day",
"=",
"int",
"(",
"byweekday",
"[",
"0",
"]",
")",
",",
"byweekday",
"[",
"1",
":",
"]",
"args",
"[",
"'byweekday'",
"]",
"=",
"WEEKDAY_MAP",
"[",
"day",
"]",
"(",
"count",
")",
"return",
"freq",
",",
"args"
] |
Converts icalendar rrule to dateutil rrule.
|
[
"Converts",
"icalendar",
"rrule",
"to",
"dateutil",
"rrule",
"."
] |
cb7ed0199cca2c405af9d6d209ddbf739a437e9c
|
https://github.com/willkg/phil/blob/cb7ed0199cca2c405af9d6d209ddbf739a437e9c/phil/util.py#L198-L223
|
239,402
|
willkg/phil
|
phil/util.py
|
parse_ics
|
def parse_ics(icsfile):
"""Takes an icsfilename, parses it, and returns Events."""
events = []
cal = Calendar.from_ical(open(icsfile, 'rb').read())
for component in cal.walk('vevent'):
dtstart = component['dtstart'].dt
rrule = component['rrule']
freq, args = convert_rrule(rrule)
args['dtstart'] = dtstart
rrule = dateutil.rrule.rrule(freq, **args)
summary = vText.from_ical(component.get('summary', u''))
description = vText.from_ical(component.get('description', u''))
organizer = vText.from_ical(component.get('organizer', u''))
# TODO: Find an event id. If it's not there, then compose one
# with dtstart, summary, and organizer.
event_id = "::".join((str(dtstart), summary, organizer))
events.append(Event(event_id, rrule, summary, description))
return events
|
python
|
def parse_ics(icsfile):
"""Takes an icsfilename, parses it, and returns Events."""
events = []
cal = Calendar.from_ical(open(icsfile, 'rb').read())
for component in cal.walk('vevent'):
dtstart = component['dtstart'].dt
rrule = component['rrule']
freq, args = convert_rrule(rrule)
args['dtstart'] = dtstart
rrule = dateutil.rrule.rrule(freq, **args)
summary = vText.from_ical(component.get('summary', u''))
description = vText.from_ical(component.get('description', u''))
organizer = vText.from_ical(component.get('organizer', u''))
# TODO: Find an event id. If it's not there, then compose one
# with dtstart, summary, and organizer.
event_id = "::".join((str(dtstart), summary, organizer))
events.append(Event(event_id, rrule, summary, description))
return events
|
[
"def",
"parse_ics",
"(",
"icsfile",
")",
":",
"events",
"=",
"[",
"]",
"cal",
"=",
"Calendar",
".",
"from_ical",
"(",
"open",
"(",
"icsfile",
",",
"'rb'",
")",
".",
"read",
"(",
")",
")",
"for",
"component",
"in",
"cal",
".",
"walk",
"(",
"'vevent'",
")",
":",
"dtstart",
"=",
"component",
"[",
"'dtstart'",
"]",
".",
"dt",
"rrule",
"=",
"component",
"[",
"'rrule'",
"]",
"freq",
",",
"args",
"=",
"convert_rrule",
"(",
"rrule",
")",
"args",
"[",
"'dtstart'",
"]",
"=",
"dtstart",
"rrule",
"=",
"dateutil",
".",
"rrule",
".",
"rrule",
"(",
"freq",
",",
"*",
"*",
"args",
")",
"summary",
"=",
"vText",
".",
"from_ical",
"(",
"component",
".",
"get",
"(",
"'summary'",
",",
"u''",
")",
")",
"description",
"=",
"vText",
".",
"from_ical",
"(",
"component",
".",
"get",
"(",
"'description'",
",",
"u''",
")",
")",
"organizer",
"=",
"vText",
".",
"from_ical",
"(",
"component",
".",
"get",
"(",
"'organizer'",
",",
"u''",
")",
")",
"# TODO: Find an event id. If it's not there, then compose one",
"# with dtstart, summary, and organizer.",
"event_id",
"=",
"\"::\"",
".",
"join",
"(",
"(",
"str",
"(",
"dtstart",
")",
",",
"summary",
",",
"organizer",
")",
")",
"events",
".",
"append",
"(",
"Event",
"(",
"event_id",
",",
"rrule",
",",
"summary",
",",
"description",
")",
")",
"return",
"events"
] |
Takes an icsfilename, parses it, and returns Events.
|
[
"Takes",
"an",
"icsfilename",
"parses",
"it",
"and",
"returns",
"Events",
"."
] |
cb7ed0199cca2c405af9d6d209ddbf739a437e9c
|
https://github.com/willkg/phil/blob/cb7ed0199cca2c405af9d6d209ddbf739a437e9c/phil/util.py#L226-L249
|
239,403
|
wickman/compactor
|
compactor/process.py
|
Process.route
|
def route(cls, path):
"""A decorator to indicate that a method should be a routable HTTP endpoint.
.. code-block:: python
from compactor.process import Process
class WebProcess(Process):
@Process.route('/hello/world')
def hello_world(self, handler):
return handler.write('<html><title>hello world</title></html>')
The handler passed to the method is a tornado RequestHandler.
WARNING: This interface is alpha and may change in the future if or when
we remove tornado as a compactor dependency.
:param path: The endpoint to route to this method.
:type path: ``str``
"""
if not path.startswith('/'):
raise ValueError('Routes must start with "/"')
def wrap(fn):
setattr(fn, cls.ROUTE_ATTRIBUTE, path)
return fn
return wrap
|
python
|
def route(cls, path):
"""A decorator to indicate that a method should be a routable HTTP endpoint.
.. code-block:: python
from compactor.process import Process
class WebProcess(Process):
@Process.route('/hello/world')
def hello_world(self, handler):
return handler.write('<html><title>hello world</title></html>')
The handler passed to the method is a tornado RequestHandler.
WARNING: This interface is alpha and may change in the future if or when
we remove tornado as a compactor dependency.
:param path: The endpoint to route to this method.
:type path: ``str``
"""
if not path.startswith('/'):
raise ValueError('Routes must start with "/"')
def wrap(fn):
setattr(fn, cls.ROUTE_ATTRIBUTE, path)
return fn
return wrap
|
[
"def",
"route",
"(",
"cls",
",",
"path",
")",
":",
"if",
"not",
"path",
".",
"startswith",
"(",
"'/'",
")",
":",
"raise",
"ValueError",
"(",
"'Routes must start with \"/\"'",
")",
"def",
"wrap",
"(",
"fn",
")",
":",
"setattr",
"(",
"fn",
",",
"cls",
".",
"ROUTE_ATTRIBUTE",
",",
"path",
")",
"return",
"fn",
"return",
"wrap"
] |
A decorator to indicate that a method should be a routable HTTP endpoint.
.. code-block:: python
from compactor.process import Process
class WebProcess(Process):
@Process.route('/hello/world')
def hello_world(self, handler):
return handler.write('<html><title>hello world</title></html>')
The handler passed to the method is a tornado RequestHandler.
WARNING: This interface is alpha and may change in the future if or when
we remove tornado as a compactor dependency.
:param path: The endpoint to route to this method.
:type path: ``str``
|
[
"A",
"decorator",
"to",
"indicate",
"that",
"a",
"method",
"should",
"be",
"a",
"routable",
"HTTP",
"endpoint",
"."
] |
52714be3d84aa595a212feccb4d92ec250cede2a
|
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L15-L43
|
239,404
|
wickman/compactor
|
compactor/process.py
|
Process.install
|
def install(cls, mbox):
"""A decorator to indicate a remotely callable method on a process.
.. code-block:: python
from compactor.process import Process
class PingProcess(Process):
@Process.install('ping')
def ping(self, from_pid, body):
# do something
The installed method should take ``from_pid`` and ``body`` parameters.
``from_pid`` is the process calling the method. ``body`` is a ``bytes``
stream that was delivered with the message, possibly empty.
:param mbox: Incoming messages to this "mailbox" will be dispatched to this method.
:type mbox: ``str``
"""
def wrap(fn):
setattr(fn, cls.INSTALL_ATTRIBUTE, mbox)
return fn
return wrap
|
python
|
def install(cls, mbox):
"""A decorator to indicate a remotely callable method on a process.
.. code-block:: python
from compactor.process import Process
class PingProcess(Process):
@Process.install('ping')
def ping(self, from_pid, body):
# do something
The installed method should take ``from_pid`` and ``body`` parameters.
``from_pid`` is the process calling the method. ``body`` is a ``bytes``
stream that was delivered with the message, possibly empty.
:param mbox: Incoming messages to this "mailbox" will be dispatched to this method.
:type mbox: ``str``
"""
def wrap(fn):
setattr(fn, cls.INSTALL_ATTRIBUTE, mbox)
return fn
return wrap
|
[
"def",
"install",
"(",
"cls",
",",
"mbox",
")",
":",
"def",
"wrap",
"(",
"fn",
")",
":",
"setattr",
"(",
"fn",
",",
"cls",
".",
"INSTALL_ATTRIBUTE",
",",
"mbox",
")",
"return",
"fn",
"return",
"wrap"
] |
A decorator to indicate a remotely callable method on a process.
.. code-block:: python
from compactor.process import Process
class PingProcess(Process):
@Process.install('ping')
def ping(self, from_pid, body):
# do something
The installed method should take ``from_pid`` and ``body`` parameters.
``from_pid`` is the process calling the method. ``body`` is a ``bytes``
stream that was delivered with the message, possibly empty.
:param mbox: Incoming messages to this "mailbox" will be dispatched to this method.
:type mbox: ``str``
|
[
"A",
"decorator",
"to",
"indicate",
"a",
"remotely",
"callable",
"method",
"on",
"a",
"process",
"."
] |
52714be3d84aa595a212feccb4d92ec250cede2a
|
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L49-L71
|
239,405
|
wickman/compactor
|
compactor/process.py
|
Process.pid
|
def pid(self):
"""The pid of this process.
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
"""
self._assert_bound()
return PID(self._context.ip, self._context.port, self.name)
|
python
|
def pid(self):
"""The pid of this process.
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
"""
self._assert_bound()
return PID(self._context.ip, self._context.port, self.name)
|
[
"def",
"pid",
"(",
"self",
")",
":",
"self",
".",
"_assert_bound",
"(",
")",
"return",
"PID",
"(",
"self",
".",
"_context",
".",
"ip",
",",
"self",
".",
"_context",
".",
"port",
",",
"self",
".",
"name",
")"
] |
The pid of this process.
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
|
[
"The",
"pid",
"of",
"this",
"process",
"."
] |
52714be3d84aa595a212feccb4d92ec250cede2a
|
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L120-L127
|
239,406
|
wickman/compactor
|
compactor/process.py
|
Process.link
|
def link(self, to):
"""Link to another process.
The ``link`` operation is not guaranteed to succeed. If it does, when
the other process terminates, the ``exited`` method will be called with
its pid.
Returns immediately.
:param to: The pid of the process to send a message.
:type to: :class:`PID`
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
:return: Nothing
"""
self._assert_bound()
self._context.link(self.pid, to)
|
python
|
def link(self, to):
"""Link to another process.
The ``link`` operation is not guaranteed to succeed. If it does, when
the other process terminates, the ``exited`` method will be called with
its pid.
Returns immediately.
:param to: The pid of the process to send a message.
:type to: :class:`PID`
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
:return: Nothing
"""
self._assert_bound()
self._context.link(self.pid, to)
|
[
"def",
"link",
"(",
"self",
",",
"to",
")",
":",
"self",
".",
"_assert_bound",
"(",
")",
"self",
".",
"_context",
".",
"link",
"(",
"self",
".",
"pid",
",",
"to",
")"
] |
Link to another process.
The ``link`` operation is not guaranteed to succeed. If it does, when
the other process terminates, the ``exited`` method will be called with
its pid.
Returns immediately.
:param to: The pid of the process to send a message.
:type to: :class:`PID`
:raises: Will raise a ``Process.UnboundProcess`` exception if the
process is not bound to a context.
:return: Nothing
|
[
"Link",
"to",
"another",
"process",
"."
] |
52714be3d84aa595a212feccb4d92ec250cede2a
|
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L195-L211
|
239,407
|
wickman/compactor
|
compactor/process.py
|
ProtobufProcess.install
|
def install(cls, message_type):
"""A decorator to indicate a remotely callable method on a process using protocol buffers.
.. code-block:: python
from compactor.process import ProtobufProcess
from messages_pb2 import RequestMessage, ResponseMessage
class PingProcess(ProtobufProcess):
@ProtobufProcess.install(RequestMessage)
def ping(self, from_pid, message):
# do something with message, a RequestMessage
response = ResponseMessage(...)
# send a protocol buffer which will get serialized on the wire.
self.send(from_pid, response)
The installed method should take ``from_pid`` and ``message`` parameters.
``from_pid`` is the process calling the method. ``message`` is a protocol
buffer of the installed type.
:param message_type: Incoming messages to this message_type will be dispatched to this method.
:type message_type: A generated protocol buffer stub
"""
def wrap(fn):
@functools.wraps(fn)
def wrapped_fn(self, from_pid, message_str):
message = message_type()
message.MergeFromString(message_str)
return fn(self, from_pid, message)
return Process.install(message_type.DESCRIPTOR.full_name)(wrapped_fn)
return wrap
|
python
|
def install(cls, message_type):
"""A decorator to indicate a remotely callable method on a process using protocol buffers.
.. code-block:: python
from compactor.process import ProtobufProcess
from messages_pb2 import RequestMessage, ResponseMessage
class PingProcess(ProtobufProcess):
@ProtobufProcess.install(RequestMessage)
def ping(self, from_pid, message):
# do something with message, a RequestMessage
response = ResponseMessage(...)
# send a protocol buffer which will get serialized on the wire.
self.send(from_pid, response)
The installed method should take ``from_pid`` and ``message`` parameters.
``from_pid`` is the process calling the method. ``message`` is a protocol
buffer of the installed type.
:param message_type: Incoming messages to this message_type will be dispatched to this method.
:type message_type: A generated protocol buffer stub
"""
def wrap(fn):
@functools.wraps(fn)
def wrapped_fn(self, from_pid, message_str):
message = message_type()
message.MergeFromString(message_str)
return fn(self, from_pid, message)
return Process.install(message_type.DESCRIPTOR.full_name)(wrapped_fn)
return wrap
|
[
"def",
"install",
"(",
"cls",
",",
"message_type",
")",
":",
"def",
"wrap",
"(",
"fn",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"fn",
")",
"def",
"wrapped_fn",
"(",
"self",
",",
"from_pid",
",",
"message_str",
")",
":",
"message",
"=",
"message_type",
"(",
")",
"message",
".",
"MergeFromString",
"(",
"message_str",
")",
"return",
"fn",
"(",
"self",
",",
"from_pid",
",",
"message",
")",
"return",
"Process",
".",
"install",
"(",
"message_type",
".",
"DESCRIPTOR",
".",
"full_name",
")",
"(",
"wrapped_fn",
")",
"return",
"wrap"
] |
A decorator to indicate a remotely callable method on a process using protocol buffers.
.. code-block:: python
from compactor.process import ProtobufProcess
from messages_pb2 import RequestMessage, ResponseMessage
class PingProcess(ProtobufProcess):
@ProtobufProcess.install(RequestMessage)
def ping(self, from_pid, message):
# do something with message, a RequestMessage
response = ResponseMessage(...)
# send a protocol buffer which will get serialized on the wire.
self.send(from_pid, response)
The installed method should take ``from_pid`` and ``message`` parameters.
``from_pid`` is the process calling the method. ``message`` is a protocol
buffer of the installed type.
:param message_type: Incoming messages to this message_type will be dispatched to this method.
:type message_type: A generated protocol buffer stub
|
[
"A",
"decorator",
"to",
"indicate",
"a",
"remotely",
"callable",
"method",
"on",
"a",
"process",
"using",
"protocol",
"buffers",
"."
] |
52714be3d84aa595a212feccb4d92ec250cede2a
|
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L227-L257
|
239,408
|
s-m-i-t-a/railroad
|
railroad/actions.py
|
actions
|
def actions(acts, done):
'''
Prepare actions pipeline.
:param tuple acts: called functions
:param function done: get result from actions
:returns function: function that starts executio
'''
def _intermediate(acc, action):
result = action(acc['state'])
values = concatv(acc['values'], [result['answer']])
return {'values': values, 'state': result['state']}
def _actions(seed):
init = {'values': [], 'state': seed}
result = reduce(_intermediate, acts, init)
keep = remove(lambda x: x is None, result['values'])
return done(keep, result['state'])
return _actions
|
python
|
def actions(acts, done):
'''
Prepare actions pipeline.
:param tuple acts: called functions
:param function done: get result from actions
:returns function: function that starts executio
'''
def _intermediate(acc, action):
result = action(acc['state'])
values = concatv(acc['values'], [result['answer']])
return {'values': values, 'state': result['state']}
def _actions(seed):
init = {'values': [], 'state': seed}
result = reduce(_intermediate, acts, init)
keep = remove(lambda x: x is None, result['values'])
return done(keep, result['state'])
return _actions
|
[
"def",
"actions",
"(",
"acts",
",",
"done",
")",
":",
"def",
"_intermediate",
"(",
"acc",
",",
"action",
")",
":",
"result",
"=",
"action",
"(",
"acc",
"[",
"'state'",
"]",
")",
"values",
"=",
"concatv",
"(",
"acc",
"[",
"'values'",
"]",
",",
"[",
"result",
"[",
"'answer'",
"]",
"]",
")",
"return",
"{",
"'values'",
":",
"values",
",",
"'state'",
":",
"result",
"[",
"'state'",
"]",
"}",
"def",
"_actions",
"(",
"seed",
")",
":",
"init",
"=",
"{",
"'values'",
":",
"[",
"]",
",",
"'state'",
":",
"seed",
"}",
"result",
"=",
"reduce",
"(",
"_intermediate",
",",
"acts",
",",
"init",
")",
"keep",
"=",
"remove",
"(",
"lambda",
"x",
":",
"x",
"is",
"None",
",",
"result",
"[",
"'values'",
"]",
")",
"return",
"done",
"(",
"keep",
",",
"result",
"[",
"'state'",
"]",
")",
"return",
"_actions"
] |
Prepare actions pipeline.
:param tuple acts: called functions
:param function done: get result from actions
:returns function: function that starts executio
|
[
"Prepare",
"actions",
"pipeline",
"."
] |
ddb4afa018b8523b5d8c3a86e55388d1ea0ab37c
|
https://github.com/s-m-i-t-a/railroad/blob/ddb4afa018b8523b5d8c3a86e55388d1ea0ab37c/railroad/actions.py#L8-L32
|
239,409
|
s-m-i-t-a/railroad
|
railroad/actions.py
|
lift
|
def lift(fn=None, state_fn=None):
"""
The lift decorator function will be used to abstract away the management
of the state object used as the intermediate representation of actions.
:param function answer: a function to provide
the result of some action given a value
:param function state: a function to provide what the new state looks like
:returns function: a function suitable for use in actions
"""
if fn is None:
return partial(lift, state_fn=state_fn)
@wraps(fn)
def _lift(*args, **kwargs):
def _run(state):
ans = fn(*cons(state, args), **kwargs)
s = state_fn(state) if state_fn is not None else ans
return {'answer': ans, 'state': s}
return _run
return _lift
|
python
|
def lift(fn=None, state_fn=None):
"""
The lift decorator function will be used to abstract away the management
of the state object used as the intermediate representation of actions.
:param function answer: a function to provide
the result of some action given a value
:param function state: a function to provide what the new state looks like
:returns function: a function suitable for use in actions
"""
if fn is None:
return partial(lift, state_fn=state_fn)
@wraps(fn)
def _lift(*args, **kwargs):
def _run(state):
ans = fn(*cons(state, args), **kwargs)
s = state_fn(state) if state_fn is not None else ans
return {'answer': ans, 'state': s}
return _run
return _lift
|
[
"def",
"lift",
"(",
"fn",
"=",
"None",
",",
"state_fn",
"=",
"None",
")",
":",
"if",
"fn",
"is",
"None",
":",
"return",
"partial",
"(",
"lift",
",",
"state_fn",
"=",
"state_fn",
")",
"@",
"wraps",
"(",
"fn",
")",
"def",
"_lift",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"_run",
"(",
"state",
")",
":",
"ans",
"=",
"fn",
"(",
"*",
"cons",
"(",
"state",
",",
"args",
")",
",",
"*",
"*",
"kwargs",
")",
"s",
"=",
"state_fn",
"(",
"state",
")",
"if",
"state_fn",
"is",
"not",
"None",
"else",
"ans",
"return",
"{",
"'answer'",
":",
"ans",
",",
"'state'",
":",
"s",
"}",
"return",
"_run",
"return",
"_lift"
] |
The lift decorator function will be used to abstract away the management
of the state object used as the intermediate representation of actions.
:param function answer: a function to provide
the result of some action given a value
:param function state: a function to provide what the new state looks like
:returns function: a function suitable for use in actions
|
[
"The",
"lift",
"decorator",
"function",
"will",
"be",
"used",
"to",
"abstract",
"away",
"the",
"management",
"of",
"the",
"state",
"object",
"used",
"as",
"the",
"intermediate",
"representation",
"of",
"actions",
"."
] |
ddb4afa018b8523b5d8c3a86e55388d1ea0ab37c
|
https://github.com/s-m-i-t-a/railroad/blob/ddb4afa018b8523b5d8c3a86e55388d1ea0ab37c/railroad/actions.py#L35-L56
|
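The two railroad rows above are meant to be used together: `lift` turns a plain function into a state-threading action, and `actions` chains those actions and hands the collected answers to `done`. The sketch below is a hedged usage example; it assumes `railroad.actions` is importable (e.g., the repo is on `PYTHONPATH`), and the step functions `add` and `double` are made up for illustration.

```python
from railroad.actions import actions, lift  # assumes the repo is importable

@lift
def add(state, n):
    # A lifted function receives the current state as its first argument.
    return state + n

@lift
def double(state):
    return state * 2

# `done` receives the kept (non-None) answers and the final state.
pipeline = actions((add(3), double()), done=lambda values, state: (list(values), state))
print(pipeline(1))  # ([4, 8], 8): 1+3 -> 4, then 4*2 -> 8; with no state_fn, each answer becomes the new state
```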
239,410
|
kervi/kervi-devices
|
kervi/devices/motors/LN298.py
|
_DCMotorController._set_speed
|
def _set_speed(self, motor, speed):
"""
Change the speed of a motor on the controller.
:param motor: The motor to change.
:type motor: ``int``
:param speed: Speed from -100 to +100, 0 is stop
:type speed: ``int``
"""
self._validate_motor(motor)
en, in1, in2 = self._motors[motor-1]
if speed == 0:
en.pwm_stop()
in1.set(False)
in2.set(False)
elif speed > 0:
en.pwm_start(abs(speed))
in1.set(True)
in2.set(False)
else:
en.pwm_start(abs(speed))
in1.set(False)
in2.set(True)
|
python
|
def _set_speed(self, motor, speed):
"""
Change the speed of a motor on the controller.
:param motor: The motor to change.
:type motor: ``int``
:param speed: Speed from -100 to +100, 0 is stop
:type speed: ``int``
"""
self._validate_motor(motor)
en, in1, in2 = self._motors[motor-1]
if speed == 0:
en.pwm_stop()
in1.set(False)
in2.set(False)
elif speed > 0:
en.pwm_start(abs(speed))
in1.set(True)
in2.set(False)
else:
en.pwm_start(abs(speed))
in1.set(False)
in2.set(True)
|
[
"def",
"_set_speed",
"(",
"self",
",",
"motor",
",",
"speed",
")",
":",
"self",
".",
"_validate_motor",
"(",
"motor",
")",
"en",
",",
"in1",
",",
"in2",
"=",
"self",
".",
"_motors",
"[",
"motor",
"-",
"1",
"]",
"if",
"speed",
"==",
"0",
":",
"en",
".",
"pwm_stop",
"(",
")",
"in1",
".",
"set",
"(",
"False",
")",
"in2",
".",
"set",
"(",
"False",
")",
"elif",
"speed",
">",
"0",
":",
"en",
".",
"pwm_start",
"(",
"abs",
"(",
"speed",
")",
")",
"in1",
".",
"set",
"(",
"True",
")",
"in2",
".",
"set",
"(",
"False",
")",
"else",
":",
"en",
".",
"pwm_start",
"(",
"abs",
"(",
"speed",
")",
")",
"in1",
".",
"set",
"(",
"False",
")",
"in2",
".",
"set",
"(",
"True",
")"
] |
Change the speed of a motor on the controller.
:param motor: The motor to change.
:type motor: ``int``
:param speed: Speed from -100 to +100, 0 is stop
:type speed: ``int``
|
[
"Change",
"the",
"speed",
"of",
"a",
"motor",
"on",
"the",
"controller",
"."
] |
c6aaddc6da1d0bce0ea2b0c6eb8393ba10aefa56
|
https://github.com/kervi/kervi-devices/blob/c6aaddc6da1d0bce0ea2b0c6eb8393ba10aefa56/kervi/devices/motors/LN298.py#L14-L39
|
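For readers skimming the `_set_speed` row above: `abs(speed)` drives the EN pin's PWM duty cycle while IN1/IN2 select the H-bridge direction. The helper below merely restates that branch logic as a pure function for illustration; it is not part of kervi-devices.

```python
def ln298_pin_states(speed):
    """What _set_speed above does to the EN/IN1/IN2 pins, as plain data."""
    if speed == 0:
        return {'en_duty': None, 'in1': False, 'in2': False}        # stopped
    return {'en_duty': abs(speed), 'in1': speed > 0, 'in2': speed < 0}

print(ln298_pin_states(75))    # {'en_duty': 75, 'in1': True, 'in2': False}
print(ln298_pin_states(-40))   # {'en_duty': 40, 'in1': False, 'in2': True}
```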
239,411
|
arximboldi/cooper
|
cooper/cooper.py
|
check_all_params_are_keyword
|
def check_all_params_are_keyword(method):
"""
Raises CooperativeError if method any parameter that is not a
named keyword parameter
"""
args, varargs, keywords, defaults = inspect.getargspec(method)
# Always have self, thus the -1
if len(args or []) - 1 != len(defaults or []):
raise CooperativeError, "Init has positional parameters " + \
str(args[1:])
if varargs:
raise CooperativeError, "Init has variadic positional parameters"
if keywords:
raise CooperativeError, "Init has variadic keyword parameters"
|
python
|
def check_all_params_are_keyword(method):
"""
Raises CooperativeError if method any parameter that is not a
named keyword parameter
"""
args, varargs, keywords, defaults = inspect.getargspec(method)
# Always have self, thus the -1
if len(args or []) - 1 != len(defaults or []):
raise CooperativeError, "Init has positional parameters " + \
str(args[1:])
if varargs:
raise CooperativeError, "Init has variadic positional parameters"
if keywords:
raise CooperativeError, "Init has variadic keyword parameters"
|
[
"def",
"check_all_params_are_keyword",
"(",
"method",
")",
":",
"args",
",",
"varargs",
",",
"keywords",
",",
"defaults",
"=",
"inspect",
".",
"getargspec",
"(",
"method",
")",
"# Always have self, thus the -1",
"if",
"len",
"(",
"args",
"or",
"[",
"]",
")",
"-",
"1",
"!=",
"len",
"(",
"defaults",
"or",
"[",
"]",
")",
":",
"raise",
"CooperativeError",
",",
"\"Init has positional parameters \"",
"+",
"str",
"(",
"args",
"[",
"1",
":",
"]",
")",
"if",
"varargs",
":",
"raise",
"CooperativeError",
",",
"\"Init has variadic positional parameters\"",
"if",
"keywords",
":",
"raise",
"CooperativeError",
",",
"\"Init has variadic keyword parameters\""
] |
Raises CooperativeError if method any parameter that is not a
named keyword parameter
|
[
"Raises",
"CooperativeError",
"if",
"method",
"any",
"parameter",
"that",
"is",
"not",
"a",
"named",
"keyword",
"parameter"
] |
639d00932a21290fb9d7fe1cb0aed283b6785820
|
https://github.com/arximboldi/cooper/blob/639d00932a21290fb9d7fe1cb0aed283b6785820/cooper/cooper.py#L45-L60
|
239,412
|
arximboldi/cooper
|
cooper/cooper.py
|
make_keyword_extractor
|
def make_keyword_extractor(method):
"""
Removes all keyword parameters required by 'method' from
dictionary 'keys' and returns them in a separate dictionary.
"""
args, _1, _2, defs = inspect.getargspec(method)
key_args = args[-len(defs or []):]
def extractor(keys):
new = {}
for a in key_args:
if a in keys:
new[a] = keys[a]
del keys[a]
return new
return extractor
|
python
|
def make_keyword_extractor(method):
"""
Removes all keyword parameters required by 'method' from
dictionary 'keys' and returns them in a separate dictionary.
"""
args, _1, _2, defs = inspect.getargspec(method)
key_args = args[-len(defs or []):]
def extractor(keys):
new = {}
for a in key_args:
if a in keys:
new[a] = keys[a]
del keys[a]
return new
return extractor
|
[
"def",
"make_keyword_extractor",
"(",
"method",
")",
":",
"args",
",",
"_1",
",",
"_2",
",",
"defs",
"=",
"inspect",
".",
"getargspec",
"(",
"method",
")",
"key_args",
"=",
"args",
"[",
"-",
"len",
"(",
"defs",
"or",
"[",
"]",
")",
":",
"]",
"def",
"extractor",
"(",
"keys",
")",
":",
"new",
"=",
"{",
"}",
"for",
"a",
"in",
"key_args",
":",
"if",
"a",
"in",
"keys",
":",
"new",
"[",
"a",
"]",
"=",
"keys",
"[",
"a",
"]",
"del",
"keys",
"[",
"a",
"]",
"return",
"new",
"return",
"extractor"
] |
Removes all keyword parameters required by 'method' from
dictionary 'keys' and returns them in a separate dictionary.
|
[
"Removes",
"all",
"keyword",
"parameters",
"required",
"by",
"method",
"from",
"dictionary",
"keys",
"and",
"returns",
"them",
"in",
"a",
"separate",
"dictionary",
"."
] |
639d00932a21290fb9d7fe1cb0aed283b6785820
|
https://github.com/arximboldi/cooper/blob/639d00932a21290fb9d7fe1cb0aed283b6785820/cooper/cooper.py#L65-L80
|
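A self-contained look at what `make_keyword_extractor` (above) produces. This sketch uses `inspect.getfullargspec` in place of the record's `getargspec` (removed in Python 3.11); only `.args` and `.defaults` are needed, and the `Greeter` class is a made-up example.

```python
import inspect

def make_keyword_extractor(method):
    spec = inspect.getfullargspec(method)
    key_args = spec.args[-len(spec.defaults or []):]
    def extractor(keys):
        new = {}
        for a in key_args:
            if a in keys:
                new[a] = keys[a]
                del keys[a]
        return new
    return extractor

class Greeter(object):
    def __init__(self, name, greeting='hello', punct='!'):
        pass

extract = make_keyword_extractor(Greeter.__init__)
keys = {'greeting': 'hi', 'name': 'Ada', 'unrelated': 1}
print(extract(keys))  # {'greeting': 'hi'}            -- only keyword params are pulled out
print(keys)           # {'name': 'Ada', 'unrelated': 1} -- extracted keys were removed in place
```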
239,413
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
JB_Plugin.get_config
|
def get_config(self, ):
"""Return the user config for this plugin
You have to provide a configspec,
put the configspec file in the same folder as your plugin.
Name it like your class and put 'ini' as extension.
"""
# get the module of the plugin class
mod = sys.modules[self.__module__]
# get the file from where it was imported
modfile = mod.__file__
# get the module directory
specdir = os.path.dirname(modfile)
# get the classname
cname = self.__class__.__name__
# add the extension
confname = os.extsep.join((cname, CONFIG_EXT))
specpath = os.path.join(specdir, confname)
if not os.path.exists(specpath):
return None
confpath = os.path.join(PLUGIN_CONFIG_DIR, confname)
return load_config(confpath, specpath)
|
python
|
def get_config(self, ):
"""Return the user config for this plugin
You have to provide a configspec,
put the configspec file in the same folder as your plugin.
Name it like your class and put 'ini' as extension.
"""
# get the module of the plugin class
mod = sys.modules[self.__module__]
# get the file from where it was imported
modfile = mod.__file__
# get the module directory
specdir = os.path.dirname(modfile)
# get the classname
cname = self.__class__.__name__
# add the extension
confname = os.extsep.join((cname, CONFIG_EXT))
specpath = os.path.join(specdir, confname)
if not os.path.exists(specpath):
return None
confpath = os.path.join(PLUGIN_CONFIG_DIR, confname)
return load_config(confpath, specpath)
|
[
"def",
"get_config",
"(",
"self",
",",
")",
":",
"# get the module of the plugin class",
"mod",
"=",
"sys",
".",
"modules",
"[",
"self",
".",
"__module__",
"]",
"# get the file from where it was imported",
"modfile",
"=",
"mod",
".",
"__file__",
"# get the module directory",
"specdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"modfile",
")",
"# get the classname",
"cname",
"=",
"self",
".",
"__class__",
".",
"__name__",
"# add the extension",
"confname",
"=",
"os",
".",
"extsep",
".",
"join",
"(",
"(",
"cname",
",",
"CONFIG_EXT",
")",
")",
"specpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"specdir",
",",
"confname",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"specpath",
")",
":",
"return",
"None",
"confpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"PLUGIN_CONFIG_DIR",
",",
"confname",
")",
"return",
"load_config",
"(",
"confpath",
",",
"specpath",
")"
] |
Return the user config for this plugin
You have to provide a configspec,
put the configspec file in the same folder as your plugin.
Name it like your class and put 'ini' as extension.
|
[
"Return",
"the",
"user",
"config",
"for",
"this",
"plugin"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L141-L163
|
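The `get_config` row above implies a naming convention that is easy to miss: the configspec sits next to the plugin module and is named after the plugin class, with the extension taken from `CONFIG_EXT` (the docstring says 'ini'). A hypothetical layout, purely for illustration:

```python
# myplugins/
#   coolplugin.py     # defines: class CoolPlugin(JB_Plugin): ...
#   CoolPlugin.ini    # configspec, named after the class plus CONFIG_EXT
#
# get_config() then loads the user config from
# os.path.join(PLUGIN_CONFIG_DIR, "CoolPlugin.ini"), validated against that spec,
# and returns None if the spec file is missing.
```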
239,414
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.find_plugins
|
def find_plugins(self, path):
"""Return a list with all plugins found in path
:param path: the directory with plugins
:type path: str
:returns: list of JB_Plugin subclasses
:rtype: list
:raises: None
"""
ext = os.extsep+'py'
files = []
for (dirpath, dirnames, filenames) in os.walk(path):
files.extend([os.path.join(dirpath, x) for x in filenames if x.endswith(ext)])
plugins = []
for f in files:
try:
mod = self.__import_file(f)
except Exception:
tb = traceback.format_exc()
log.debug("Importing plugin from %s failed!\n%s" % (f, tb))
continue
# get all classes in the imported file
members = inspect.getmembers(mod, lambda x: inspect.isclass(x))
# only get classes which are defined, not imported, in mod
classes = [m[1] for m in members if m[1].__module__ == mod.__name__]
for c in classes:
# if the class is derived from a supported type append it
# we test if it is a subclass of a supported type but not a supported type itself
# because that might be the abstract class
if any(issubclass(c, supported) for supported in self.supportedTypes)\
and c not in self.supportedTypes:
plugins.append(c)
return plugins
|
python
|
def find_plugins(self, path):
"""Return a list with all plugins found in path
:param path: the directory with plugins
:type path: str
:returns: list of JB_Plugin subclasses
:rtype: list
:raises: None
"""
ext = os.extsep+'py'
files = []
for (dirpath, dirnames, filenames) in os.walk(path):
files.extend([os.path.join(dirpath, x) for x in filenames if x.endswith(ext)])
plugins = []
for f in files:
try:
mod = self.__import_file(f)
except Exception:
tb = traceback.format_exc()
log.debug("Importing plugin from %s failed!\n%s" % (f, tb))
continue
# get all classes in the imported file
members = inspect.getmembers(mod, lambda x: inspect.isclass(x))
# only get classes which are defined, not imported, in mod
classes = [m[1] for m in members if m[1].__module__ == mod.__name__]
for c in classes:
# if the class is derived from a supported type append it
# we test if it is a subclass of a supported type but not a supported type itself
# because that might be the abstract class
if any(issubclass(c, supported) for supported in self.supportedTypes)\
and c not in self.supportedTypes:
plugins.append(c)
return plugins
|
[
"def",
"find_plugins",
"(",
"self",
",",
"path",
")",
":",
"ext",
"=",
"os",
".",
"extsep",
"+",
"'py'",
"files",
"=",
"[",
"]",
"for",
"(",
"dirpath",
",",
"dirnames",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"files",
".",
"extend",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"x",
")",
"for",
"x",
"in",
"filenames",
"if",
"x",
".",
"endswith",
"(",
"ext",
")",
"]",
")",
"plugins",
"=",
"[",
"]",
"for",
"f",
"in",
"files",
":",
"try",
":",
"mod",
"=",
"self",
".",
"__import_file",
"(",
"f",
")",
"except",
"Exception",
":",
"tb",
"=",
"traceback",
".",
"format_exc",
"(",
")",
"log",
".",
"debug",
"(",
"\"Importing plugin from %s failed!\\n%s\"",
"%",
"(",
"f",
",",
"tb",
")",
")",
"continue",
"# get all classes in the imported file",
"members",
"=",
"inspect",
".",
"getmembers",
"(",
"mod",
",",
"lambda",
"x",
":",
"inspect",
".",
"isclass",
"(",
"x",
")",
")",
"# only get classes which are defined, not imported, in mod",
"classes",
"=",
"[",
"m",
"[",
"1",
"]",
"for",
"m",
"in",
"members",
"if",
"m",
"[",
"1",
"]",
".",
"__module__",
"==",
"mod",
".",
"__name__",
"]",
"for",
"c",
"in",
"classes",
":",
"# if the class is derived from a supported type append it",
"# we test if it is a subclass of a supported type but not a supported type itself",
"# because that might be the abstract class",
"if",
"any",
"(",
"issubclass",
"(",
"c",
",",
"supported",
")",
"for",
"supported",
"in",
"self",
".",
"supportedTypes",
")",
"and",
"c",
"not",
"in",
"self",
".",
"supportedTypes",
":",
"plugins",
".",
"append",
"(",
"c",
")",
"return",
"plugins"
] |
Return a list with all plugins found in path
:param path: the directory with plugins
:type path: str
:returns: list of JB_Plugin subclasses
:rtype: list
:raises: None
|
[
"Return",
"a",
"list",
"with",
"all",
"plugins",
"found",
"in",
"path"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L301-L333
|
239,415
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.gather_plugins
|
def gather_plugins(self):
"""Return all plugins that are found in the plugin paths
Looks in the envvar ``JUKEBOX_PLUGIN_PATH``.
:returns:
:rtype:
:raises:
"""
plugins = []
cfg = get_core_config()
pathenv = cfg['jukebox']['pluginpaths']
pathenv = os.pathsep.join((pathenv, os.environ.get("JUKEBOX_PLUGIN_PATH", "")))
paths = pathenv.split(os.pathsep)
# first find built-ins then the ones in the config, then the one from the environment
# so user plugins can override built-ins
for p in reversed(paths):
if p and os.path.exists(p): # in case of an empty string, we do not search!
plugins.extend(self.find_plugins(p))
return plugins
|
python
|
def gather_plugins(self):
"""Return all plugins that are found in the plugin paths
Looks in the envvar ``JUKEBOX_PLUGIN_PATH``.
:returns:
:rtype:
:raises:
"""
plugins = []
cfg = get_core_config()
pathenv = cfg['jukebox']['pluginpaths']
pathenv = os.pathsep.join((pathenv, os.environ.get("JUKEBOX_PLUGIN_PATH", "")))
paths = pathenv.split(os.pathsep)
# first find built-ins then the ones in the config, then the one from the environment
# so user plugins can override built-ins
for p in reversed(paths):
if p and os.path.exists(p): # in case of an empty string, we do not search!
plugins.extend(self.find_plugins(p))
return plugins
|
[
"def",
"gather_plugins",
"(",
"self",
")",
":",
"plugins",
"=",
"[",
"]",
"cfg",
"=",
"get_core_config",
"(",
")",
"pathenv",
"=",
"cfg",
"[",
"'jukebox'",
"]",
"[",
"'pluginpaths'",
"]",
"pathenv",
"=",
"os",
".",
"pathsep",
".",
"join",
"(",
"(",
"pathenv",
",",
"os",
".",
"environ",
".",
"get",
"(",
"\"JUKEBOX_PLUGIN_PATH\"",
",",
"\"\"",
")",
")",
")",
"paths",
"=",
"pathenv",
".",
"split",
"(",
"os",
".",
"pathsep",
")",
"# first find built-ins then the ones in the config, then the one from the environment",
"# so user plugins can override built-ins",
"for",
"p",
"in",
"reversed",
"(",
"paths",
")",
":",
"if",
"p",
"and",
"os",
".",
"path",
".",
"exists",
"(",
"p",
")",
":",
"# in case of an empty string, we do not search!",
"plugins",
".",
"extend",
"(",
"self",
".",
"find_plugins",
"(",
"p",
")",
")",
"return",
"plugins"
] |
Return all plugins that are found in the plugin paths
Looks in the envvar ``JUKEBOX_PLUGIN_PATH``.
:returns:
:rtype:
:raises:
|
[
"Return",
"all",
"plugins",
"that",
"are",
"found",
"in",
"the",
"plugin",
"paths"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L335-L354
|
239,416
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.load_plugins
|
def load_plugins(self, ):
"""Loads all found plugins
:returns: None
:rtype: None
:raises: None
"""
for p in self.__plugins.values():
try:
self.load_plugin(p)
except errors.PluginInitError:
log.exception('Initializing the plugin: %s failed.' % p)
|
python
|
def load_plugins(self, ):
"""Loads all found plugins
:returns: None
:rtype: None
:raises: None
"""
for p in self.__plugins.values():
try:
self.load_plugin(p)
except errors.PluginInitError:
log.exception('Initializing the plugin: %s failed.' % p)
|
[
"def",
"load_plugins",
"(",
"self",
",",
")",
":",
"for",
"p",
"in",
"self",
".",
"__plugins",
".",
"values",
"(",
")",
":",
"try",
":",
"self",
".",
"load_plugin",
"(",
"p",
")",
"except",
"errors",
".",
"PluginInitError",
":",
"log",
".",
"exception",
"(",
"'Initializing the plugin: %s failed.'",
"%",
"p",
")"
] |
Loads all found plugins
:returns: None
:rtype: None
:raises: None
|
[
"Loads",
"all",
"found",
"plugins"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L356-L367
|
239,417
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.load_plugin
|
def load_plugin(self, p):
"""Load the specified plugin
:param p: The plugin to load
:type p: Subclass of JB_Plugin
:returns: None
:rtype: None
:raises: errors.PluginInitError
"""
if p.is_loaded():
return
# load required plugins first
reqnames = p.required
reqplugins = []
for name in reqnames:
try:
reqplugins.append(self.__plugins[name])
except KeyError as e:
log.error("Required Plugin %s not found. Cannot load %s." % (name, p))
raise errors.PluginInitError('Required Plugin %s not found. Cannot load %s. Reason: %s' % (name, p, e))
for plug in reqplugins:
try:
self.load_plugin(plug)
except errors.PluginInitError as e:
log.error("Required Plugin %s could not be loaded. Cannot load %s" % (plug, p))
raise errors.PluginInitError('Required Plugin %s could not be loaded. Cannot load %s. Reason: %s' % (plug,p, e))
# load the actual plugin
p._load()
log.info('Initialized the plugin: %s' % p)
|
python
|
def load_plugin(self, p):
"""Load the specified plugin
:param p: The plugin to load
:type p: Subclass of JB_Plugin
:returns: None
:rtype: None
:raises: errors.PluginInitError
"""
if p.is_loaded():
return
# load required plugins first
reqnames = p.required
reqplugins = []
for name in reqnames:
try:
reqplugins.append(self.__plugins[name])
except KeyError as e:
log.error("Required Plugin %s not found. Cannot load %s." % (name, p))
raise errors.PluginInitError('Required Plugin %s not found. Cannot load %s. Reason: %s' % (name, p, e))
for plug in reqplugins:
try:
self.load_plugin(plug)
except errors.PluginInitError as e:
log.error("Required Plugin %s could not be loaded. Cannot load %s" % (plug, p))
raise errors.PluginInitError('Required Plugin %s could not be loaded. Cannot load %s. Reason: %s' % (plug,p, e))
# load the actual plugin
p._load()
log.info('Initialized the plugin: %s' % p)
|
[
"def",
"load_plugin",
"(",
"self",
",",
"p",
")",
":",
"if",
"p",
".",
"is_loaded",
"(",
")",
":",
"return",
"# load required plugins first",
"reqnames",
"=",
"p",
".",
"required",
"reqplugins",
"=",
"[",
"]",
"for",
"name",
"in",
"reqnames",
":",
"try",
":",
"reqplugins",
".",
"append",
"(",
"self",
".",
"__plugins",
"[",
"name",
"]",
")",
"except",
"KeyError",
"as",
"e",
":",
"log",
".",
"error",
"(",
"\"Required Plugin %s not found. Cannot load %s.\"",
"%",
"(",
"name",
",",
"p",
")",
")",
"raise",
"errors",
".",
"PluginInitError",
"(",
"'Required Plugin %s not found. Cannot load %s. Reason: %s'",
"%",
"(",
"name",
",",
"p",
",",
"e",
")",
")",
"for",
"plug",
"in",
"reqplugins",
":",
"try",
":",
"self",
".",
"load_plugin",
"(",
"plug",
")",
"except",
"errors",
".",
"PluginInitError",
"as",
"e",
":",
"log",
".",
"error",
"(",
"\"Required Plugin %s could not be loaded. Cannot load %s\"",
"%",
"(",
"plug",
",",
"p",
")",
")",
"raise",
"errors",
".",
"PluginInitError",
"(",
"'Required Plugin %s could not be loaded. Cannot load %s. Reason: %s'",
"%",
"(",
"plug",
",",
"p",
",",
"e",
")",
")",
"# load the actual plugin",
"p",
".",
"_load",
"(",
")",
"log",
".",
"info",
"(",
"'Initialized the plugin: %s'",
"%",
"p",
")"
] |
Load the specified plugin
:param p: The plugin to load
:type p: Subclass of JB_Plugin
:returns: None
:rtype: None
:raises: errors.PluginInitError
|
[
"Load",
"the",
"specified",
"plugin"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L369-L397
|
239,418
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.unload_plugins
|
def unload_plugins(self, ):
""" Unloads all loaded plugins
:returns: None
:rtype: None
:raises: None
"""
for p in self.__plugins.values():
if p.is_loaded():
try:
p._unload()
log.info('Uninitialized the plugin: %s' % p)
except errors.PluginUninitError:
log.error('Uninitialization of the plugin: %s failed.' % p)
|
python
|
def unload_plugins(self, ):
""" Unloads all loaded plugins
:returns: None
:rtype: None
:raises: None
"""
for p in self.__plugins.values():
if p.is_loaded():
try:
p._unload()
log.info('Uninitialized the plugin: %s' % p)
except errors.PluginUninitError:
log.error('Uninitialization of the plugin: %s failed.' % p)
|
[
"def",
"unload_plugins",
"(",
"self",
",",
")",
":",
"for",
"p",
"in",
"self",
".",
"__plugins",
".",
"values",
"(",
")",
":",
"if",
"p",
".",
"is_loaded",
"(",
")",
":",
"try",
":",
"p",
".",
"_unload",
"(",
")",
"log",
".",
"info",
"(",
"'Uninitialized the plugin: %s'",
"%",
"p",
")",
"except",
"errors",
".",
"PluginUninitError",
":",
"log",
".",
"error",
"(",
"'Uninitialization of the plugin: %s failed.'",
"%",
"p",
")"
] |
Unloads all loaded plugins
:returns: None
:rtype: None
:raises: None
|
[
"Unloads",
"all",
"loaded",
"plugins"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L399-L412
|
239,419
|
JukeboxPipeline/jukebox-core
|
src/jukeboxcore/plugins.py
|
PluginManager.__import_file
|
def __import_file(self, f):
"""Import the specified file and return the imported module
:param f: the file to import
:type f: str
:returns: The imported module
:rtype: module
:raises: None
"""
directory, module_name = os.path.split(f)
module_name = os.path.splitext(module_name)[0]
path = list(sys.path)
sys.path.insert(0, directory)
module = __import__(module_name)
return module
|
python
|
def __import_file(self, f):
"""Import the specified file and return the imported module
:param f: the file to import
:type f: str
:returns: The imported module
:rtype: module
:raises: None
"""
directory, module_name = os.path.split(f)
module_name = os.path.splitext(module_name)[0]
path = list(sys.path)
sys.path.insert(0, directory)
module = __import__(module_name)
return module
|
[
"def",
"__import_file",
"(",
"self",
",",
"f",
")",
":",
"directory",
",",
"module_name",
"=",
"os",
".",
"path",
".",
"split",
"(",
"f",
")",
"module_name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"module_name",
")",
"[",
"0",
"]",
"path",
"=",
"list",
"(",
"sys",
".",
"path",
")",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"directory",
")",
"module",
"=",
"__import__",
"(",
"module_name",
")",
"return",
"module"
] |
Import the specified file and return the imported module
:param f: the file to import
:type f: str
:returns: The imported module
:rtype: module
:raises: None
|
[
"Import",
"the",
"specified",
"file",
"and",
"return",
"the",
"imported",
"module"
] |
bac2280ca49940355270e4b69400ce9976ab2e6f
|
https://github.com/JukeboxPipeline/jukebox-core/blob/bac2280ca49940355270e4b69400ce9976ab2e6f/src/jukeboxcore/plugins.py#L414-L429
|
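`__import_file` above prepends the plugin directory to `sys.path` and calls `__import__`, which can shadow same-named modules; it also copies `sys.path` into a local `path` variable that is never used to restore the original search path. A commonly used alternative, sketched below for comparison and not part of jukebox-core, loads the module directly from its file path with `importlib`:

```python
import importlib.util
import os

def import_file(path):
    """Load a Python source file as a module without touching sys.path."""
    module_name = os.path.splitext(os.path.basename(path))[0]
    spec = importlib.util.spec_from_file_location(module_name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module
```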
239,420
|
gevious/flask_slither
|
flask_slither/resources.py
|
BaseResource._exception_handler
|
def _exception_handler(self, e):
"""This exception handler catches should only be invoked when we need
to quit the workflow prematurely. It takes in an `ApiException`
which will contain the error, details of the error and status code.
When the application is in testing mode, it will also return the
stack trace"""
if isinstance(e.message, dict):
return self._prep_response(
e.message['msg'], status=e.message['status'])
if e.message.find('Validation') == 0:
return self._prep_response(self.validation.errors, status=400)
elif e.message.find('No record') == 0:
return self._prep_response(status=404)
else:
return self._prep_response({'message': e.message},
status=409)
|
python
|
def _exception_handler(self, e):
"""This exception handler catches should only be invoked when we need
to quit the workflow prematurely. It takes in an `ApiException`
which will contain the error, details of the error and status code.
When the application is in testing mode, it will also return the
stack trace"""
if isinstance(e.message, dict):
return self._prep_response(
e.message['msg'], status=e.message['status'])
if e.message.find('Validation') == 0:
return self._prep_response(self.validation.errors, status=400)
elif e.message.find('No record') == 0:
return self._prep_response(status=404)
else:
return self._prep_response({'message': e.message},
status=409)
|
[
"def",
"_exception_handler",
"(",
"self",
",",
"e",
")",
":",
"if",
"isinstance",
"(",
"e",
".",
"message",
",",
"dict",
")",
":",
"return",
"self",
".",
"_prep_response",
"(",
"e",
".",
"message",
"[",
"'msg'",
"]",
",",
"status",
"=",
"e",
".",
"message",
"[",
"'status'",
"]",
")",
"if",
"e",
".",
"message",
".",
"find",
"(",
"'Validation'",
")",
"==",
"0",
":",
"return",
"self",
".",
"_prep_response",
"(",
"self",
".",
"validation",
".",
"errors",
",",
"status",
"=",
"400",
")",
"elif",
"e",
".",
"message",
".",
"find",
"(",
"'No record'",
")",
"==",
"0",
":",
"return",
"self",
".",
"_prep_response",
"(",
"status",
"=",
"404",
")",
"else",
":",
"return",
"self",
".",
"_prep_response",
"(",
"{",
"'message'",
":",
"e",
".",
"message",
"}",
",",
"status",
"=",
"409",
")"
] |
This exception handler catches should only be invoked when we need
to quit the workflow prematurely. It takes in an `ApiException`
which will contain the error, details of the error and status code.
When the application is in testing mode, it will also return the
stack trace
|
[
"This",
"exception",
"handler",
"catches",
"should",
"only",
"be",
"invoked",
"when",
"we",
"need",
"to",
"quit",
"the",
"workflow",
"prematurely",
".",
"It",
"takes",
"in",
"an",
"ApiException",
"which",
"will",
"contain",
"the",
"error",
"details",
"of",
"the",
"error",
"and",
"status",
"code",
".",
"When",
"the",
"application",
"is",
"in",
"testing",
"mode",
"it",
"will",
"also",
"return",
"the",
"stack",
"trace"
] |
bf1fd1e58224c19883f4b19c5f727f47ee9857da
|
https://github.com/gevious/flask_slither/blob/bf1fd1e58224c19883f4b19c5f727f47ee9857da/flask_slither/resources.py#L68-L83
|
239,421
|
gevious/flask_slither
|
flask_slither/resources.py
|
BaseResource._get_instance
|
def _get_instance(self, **kwargs):
"""Loads the record specified by the `obj_id` path in the url and
stores it in g._resource_instance"""
current_app.logger.info("Getting instance")
current_app.logger.debug("kwargs: {}".format(kwargs))
current_app.logger.info(
"Loading instance: {}".format(kwargs['obj_id']))
rec = self.db_query.get_instance(self.db_collection, kwargs['obj_id'])
g._resource_instance = rec
current_app.logger.debug(
"g._resource_instance: {}".format(g._resource_instance))
return rec
|
python
|
def _get_instance(self, **kwargs):
"""Loads the record specified by the `obj_id` path in the url and
stores it in g._resource_instance"""
current_app.logger.info("Getting instance")
current_app.logger.debug("kwargs: {}".format(kwargs))
current_app.logger.info(
"Loading instance: {}".format(kwargs['obj_id']))
rec = self.db_query.get_instance(self.db_collection, kwargs['obj_id'])
g._resource_instance = rec
current_app.logger.debug(
"g._resource_instance: {}".format(g._resource_instance))
return rec
|
[
"def",
"_get_instance",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"current_app",
".",
"logger",
".",
"info",
"(",
"\"Getting instance\"",
")",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"kwargs: {}\"",
".",
"format",
"(",
"kwargs",
")",
")",
"current_app",
".",
"logger",
".",
"info",
"(",
"\"Loading instance: {}\"",
".",
"format",
"(",
"kwargs",
"[",
"'obj_id'",
"]",
")",
")",
"rec",
"=",
"self",
".",
"db_query",
".",
"get_instance",
"(",
"self",
".",
"db_collection",
",",
"kwargs",
"[",
"'obj_id'",
"]",
")",
"g",
".",
"_resource_instance",
"=",
"rec",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"g._resource_instance: {}\"",
".",
"format",
"(",
"g",
".",
"_resource_instance",
")",
")",
"return",
"rec"
] |
Loads the record specified by the `obj_id` path in the url and
stores it in g._resource_instance
|
[
"Loads",
"the",
"record",
"specified",
"by",
"the",
"obj_id",
"path",
"in",
"the",
"url",
"and",
"stores",
"it",
"in",
"g",
".",
"_resource_instance"
] |
bf1fd1e58224c19883f4b19c5f727f47ee9857da
|
https://github.com/gevious/flask_slither/blob/bf1fd1e58224c19883f4b19c5f727f47ee9857da/flask_slither/resources.py#L85-L97
|
239,422
|
gevious/flask_slither
|
flask_slither/resources.py
|
BaseResource.merge_record_data
|
def merge_record_data(self, changes, orig_record=None):
"""This method merges PATCH requests with the db record to ensure no
data is lost. In addition, it is also a hook for other fields to
be overwritten, to ensure immutable fields aren't changed by a
request."""
current_app.logger.info("Merging request data with db record")
current_app.logger.debug("orig_record: {}".format(orig_record))
current_app.logger.debug("Changes".format(changes))
final_record = changes
if request.method == 'PATCH':
final_record = dict(orig_record)
final_record.update(changes)
elif request.method == 'PUT':
if '_id' in orig_record:
final_record['_id'] = orig_record['_id']
return final_record
|
python
|
def merge_record_data(self, changes, orig_record=None):
"""This method merges PATCH requests with the db record to ensure no
data is lost. In addition, it is also a hook for other fields to
be overwritten, to ensure immutable fields aren't changed by a
request."""
current_app.logger.info("Merging request data with db record")
current_app.logger.debug("orig_record: {}".format(orig_record))
current_app.logger.debug("Changes".format(changes))
final_record = changes
if request.method == 'PATCH':
final_record = dict(orig_record)
final_record.update(changes)
elif request.method == 'PUT':
if '_id' in orig_record:
final_record['_id'] = orig_record['_id']
return final_record
|
[
"def",
"merge_record_data",
"(",
"self",
",",
"changes",
",",
"orig_record",
"=",
"None",
")",
":",
"current_app",
".",
"logger",
".",
"info",
"(",
"\"Merging request data with db record\"",
")",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"orig_record: {}\"",
".",
"format",
"(",
"orig_record",
")",
")",
"current_app",
".",
"logger",
".",
"debug",
"(",
"\"Changes\"",
".",
"format",
"(",
"changes",
")",
")",
"final_record",
"=",
"changes",
"if",
"request",
".",
"method",
"==",
"'PATCH'",
":",
"final_record",
"=",
"dict",
"(",
"orig_record",
")",
"final_record",
".",
"update",
"(",
"changes",
")",
"elif",
"request",
".",
"method",
"==",
"'PUT'",
":",
"if",
"'_id'",
"in",
"orig_record",
":",
"final_record",
"[",
"'_id'",
"]",
"=",
"orig_record",
"[",
"'_id'",
"]",
"return",
"final_record"
] |
This method merges PATCH requests with the db record to ensure no
data is lost. In addition, it is also a hook for other fields to
be overwritten, to ensure immutable fields aren't changed by a
request.
|
[
"This",
"method",
"merges",
"PATCH",
"requests",
"with",
"the",
"db",
"record",
"to",
"ensure",
"no",
"data",
"is",
"lost",
".",
"In",
"addition",
"it",
"is",
"also",
"a",
"hook",
"for",
"other",
"fields",
"to",
"be",
"overwritten",
"to",
"ensure",
"immutable",
"fields",
"aren",
"t",
"changed",
"by",
"a",
"request",
"."
] |
bf1fd1e58224c19883f4b19c5f727f47ee9857da
|
https://github.com/gevious/flask_slither/blob/bf1fd1e58224c19883f4b19c5f727f47ee9857da/flask_slither/resources.py#L173-L188
|
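To make the PATCH/PUT merge semantics above easier to follow, here is a minimal standalone sketch that drops the Flask request context and logging; the `method` argument stands in for `request.method`, and the function name here is ours, not the library's.

def merge_record_data_sketch(method, changes, orig_record=None):
    # PATCH: start from the stored record and overlay only the changed fields.
    # PUT: take the new document wholesale, but preserve the immutable '_id'.
    final_record = changes
    if method == 'PATCH':
        final_record = dict(orig_record)
        final_record.update(changes)
    elif method == 'PUT':
        if '_id' in orig_record:
            final_record['_id'] = orig_record['_id']
    return final_record

stored = {'_id': 42, 'name': 'old', 'age': 30}
print(merge_record_data_sketch('PATCH', {'name': 'new'}, stored))
# {'_id': 42, 'name': 'new', 'age': 30} -- untouched fields survive the PATCH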
239,423
|
SeabornGames/RequestClient
|
seaborn/request_client/repr_wrapper.py
|
repr_return
|
def repr_return(func):
"""
This is a decorator to give the return value a pretty print repr
"""
def repr_return_decorator(*args, **kwargs):
ret = func(*args, **kwargs)
if isinstance(ret, basestring):
return ret
if type(ret) in repr_map:
return repr_map[type(ret)](ret)
print('=' * 80 + '\n' +
' FAILED TO GET REPR RETURN for type (' +
str(type(ret)) + '\n' + '=' * 80)
return ret
return repr_return_decorator
|
python
|
def repr_return(func):
"""
This is a decorator to give the return value a pretty print repr
"""
def repr_return_decorator(*args, **kwargs):
ret = func(*args, **kwargs)
if isinstance(ret, basestring):
return ret
if type(ret) in repr_map:
return repr_map[type(ret)](ret)
print('=' * 80 + '\n' +
' FAILED TO GET REPR RETURN for type (' +
str(type(ret)) + '\n' + '=' * 80)
return ret
return repr_return_decorator
|
[
"def",
"repr_return",
"(",
"func",
")",
":",
"def",
"repr_return_decorator",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"isinstance",
"(",
"ret",
",",
"basestring",
")",
":",
"return",
"ret",
"if",
"type",
"(",
"ret",
")",
"in",
"repr_map",
":",
"return",
"repr_map",
"[",
"type",
"(",
"ret",
")",
"]",
"(",
"ret",
")",
"print",
"(",
"'='",
"*",
"80",
"+",
"'\\n'",
"+",
"' FAILED TO GET REPR RETURN for type ('",
"+",
"str",
"(",
"type",
"(",
"ret",
")",
")",
"+",
"'\\n'",
"+",
"'='",
"*",
"80",
")",
"return",
"ret",
"return",
"repr_return_decorator"
] |
This is a decorator to give the return value a pretty print repr
|
[
"This",
"is",
"a",
"decorator",
"to",
"give",
"the",
"return",
"value",
"a",
"pretty",
"print",
"repr"
] |
21aeb951ddfdb6ee453ad0edc896ff224e06425d
|
https://github.com/SeabornGames/RequestClient/blob/21aeb951ddfdb6ee453ad0edc896ff224e06425d/seaborn/request_client/repr_wrapper.py#L71-L90
|
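The decorator above depends on a module-level repr_map and on Python 2's basestring. Below is a rough Python 3 sketch of the same pattern; the repr_map contents are hypothetical and only illustrate how return values get routed to a pretty-printing wrapper.

import functools

# Hypothetical mapping from return types to pretty-print formatters.
repr_map = {list: lambda value: '\n'.join(repr(item) for item in value)}

def repr_return(func):
    """Route the wrapped function's return value through repr_map."""
    @functools.wraps(func)
    def repr_return_decorator(*args, **kwargs):
        ret = func(*args, **kwargs)
        if isinstance(ret, str):      # 'basestring' in the Python 2 original
            return ret
        if type(ret) in repr_map:
            return repr_map[type(ret)](ret)
        return ret                    # the original prints a warning banner here
    return repr_return_decorator

@repr_return
def fetch_rows():
    return [{'id': 1}, {'id': 2}]

print(fetch_rows())                   # one dict repr per line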
239,424
|
SeabornGames/RequestClient
|
seaborn/request_client/repr_wrapper.py
|
ReprTuple.repr_setup
|
def repr_setup(self, name=None, col_names=None, col_types=None):
"""
This wasn't safe to pass into init because of the inheritance
"""
self._name = name or self._name
self._col_types = col_types or self._col_types
|
python
|
def repr_setup(self, name=None, col_names=None, col_types=None):
"""
This wasn't safe to pass into init because of the inheritance
"""
self._name = name or self._name
self._col_types = col_types or self._col_types
|
[
"def",
"repr_setup",
"(",
"self",
",",
"name",
"=",
"None",
",",
"col_names",
"=",
"None",
",",
"col_types",
"=",
"None",
")",
":",
"self",
".",
"_name",
"=",
"name",
"or",
"self",
".",
"_name",
"self",
".",
"_col_types",
"=",
"col_types",
"or",
"self",
".",
"_col_types"
] |
This wasn't safe to pass into init because of the inheritance
|
[
"This",
"wasn",
"t",
"safe",
"to",
"pass",
"into",
"init",
"because",
"of",
"the",
"inheritance"
] |
21aeb951ddfdb6ee453ad0edc896ff224e06425d
|
https://github.com/SeabornGames/RequestClient/blob/21aeb951ddfdb6ee453ad0edc896ff224e06425d/seaborn/request_client/repr_wrapper.py#L356-L361
|
239,425
|
openvax/typechecks
|
typechecks/__init__.py
|
require_string
|
def require_string(obj, name=None, nonempty=False):
"""
Raise an exception if the obj is not of type str or unicode.
If name is provided it is used in the exception message.
If nonempty=True, then an exception is raised if the object is the empty
string.
"""
require_instance(obj, string_types, name, "string")
if nonempty and not obj:
raise ValueError(
(("%s: " % name) if name else "") +
"string must be nonempty.")
|
python
|
def require_string(obj, name=None, nonempty=False):
"""
Raise an exception if the obj is not of type str or unicode.
If name is provided it is used in the exception message.
If nonempty=True, then an exception is raised if the object is the empty
string.
"""
require_instance(obj, string_types, name, "string")
if nonempty and not obj:
raise ValueError(
(("%s: " % name) if name else "") +
"string must be nonempty.")
|
[
"def",
"require_string",
"(",
"obj",
",",
"name",
"=",
"None",
",",
"nonempty",
"=",
"False",
")",
":",
"require_instance",
"(",
"obj",
",",
"string_types",
",",
"name",
",",
"\"string\"",
")",
"if",
"nonempty",
"and",
"not",
"obj",
":",
"raise",
"ValueError",
"(",
"(",
"(",
"\"%s: \"",
"%",
"name",
")",
"if",
"name",
"else",
"\"\"",
")",
"+",
"\"string must be nonempty.\"",
")"
] |
Raise an exception if the obj is not of type str or unicode.
If name is provided it is used in the exception message.
If nonempty=True, then an exception is raised if the object is the empty
string.
|
[
"Raise",
"an",
"exception",
"if",
"the",
"obj",
"is",
"not",
"of",
"type",
"str",
"or",
"unicode",
"."
] |
5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679
|
https://github.com/openvax/typechecks/blob/5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679/typechecks/__init__.py#L32-L45
|
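A short usage example, assuming the typechecks package is installed:

from typechecks import require_string

require_string("hello", name="greeting")                 # passes silently
try:
    require_string("", name="greeting", nonempty=True)
except ValueError as error:
    print(error)                                         # greeting: string must be nonempty.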
239,426
|
openvax/typechecks
|
typechecks/__init__.py
|
require_instance
|
def require_instance(obj, types=None, name=None, type_name=None, truncate_at=80):
"""
Raise an exception if obj is not an instance of one of the specified types.
Similarly to isinstance, 'types' may be either a single type or a tuple of
types.
If name or type_name is provided, it is used in the exception message.
The object's string representation is also included in the message,
truncated to 'truncate_at' number of characters.
"""
if not isinstance(obj, types):
obj_string = str(obj)
if len(obj_string) > truncate_at:
obj_string = obj_string[:truncate_at - 3] + "..."
if type_name is None:
try:
type_name = "one of " + ", ".join(str(t) for t in types)
except TypeError:
type_name = str(types)
name_string = ("%s: " % name) if name else ""
error_message = "%sexpected %s. Got: '%s' of type '%s'" % (
name_string, type_name, obj_string, type(obj))
raise TypeError(error_message)
|
python
|
def require_instance(obj, types=None, name=None, type_name=None, truncate_at=80):
"""
Raise an exception if obj is not an instance of one of the specified types.
Similarly to isinstance, 'types' may be either a single type or a tuple of
types.
If name or type_name is provided, it is used in the exception message.
The object's string representation is also included in the message,
truncated to 'truncate_at' number of characters.
"""
if not isinstance(obj, types):
obj_string = str(obj)
if len(obj_string) > truncate_at:
obj_string = obj_string[:truncate_at - 3] + "..."
if type_name is None:
try:
type_name = "one of " + ", ".join(str(t) for t in types)
except TypeError:
type_name = str(types)
name_string = ("%s: " % name) if name else ""
error_message = "%sexpected %s. Got: '%s' of type '%s'" % (
name_string, type_name, obj_string, type(obj))
raise TypeError(error_message)
|
[
"def",
"require_instance",
"(",
"obj",
",",
"types",
"=",
"None",
",",
"name",
"=",
"None",
",",
"type_name",
"=",
"None",
",",
"truncate_at",
"=",
"80",
")",
":",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"types",
")",
":",
"obj_string",
"=",
"str",
"(",
"obj",
")",
"if",
"len",
"(",
"obj_string",
")",
">",
"truncate_at",
":",
"obj_string",
"=",
"obj_string",
"[",
":",
"truncate_at",
"-",
"3",
"]",
"+",
"\"...\"",
"if",
"type_name",
"is",
"None",
":",
"try",
":",
"type_name",
"=",
"\"one of \"",
"+",
"\", \"",
".",
"join",
"(",
"str",
"(",
"t",
")",
"for",
"t",
"in",
"types",
")",
"except",
"TypeError",
":",
"type_name",
"=",
"str",
"(",
"types",
")",
"name_string",
"=",
"(",
"\"%s: \"",
"%",
"name",
")",
"if",
"name",
"else",
"\"\"",
"error_message",
"=",
"\"%sexpected %s. Got: '%s' of type '%s'\"",
"%",
"(",
"name_string",
",",
"type_name",
",",
"obj_string",
",",
"type",
"(",
"obj",
")",
")",
"raise",
"TypeError",
"(",
"error_message",
")"
] |
Raise an exception if obj is not an instance of one of the specified types.
Similarly to isinstance, 'types' may be either a single type or a tuple of
types.
If name or type_name is provided, it is used in the exception message.
The object's string representation is also included in the message,
truncated to 'truncate_at' number of characters.
|
[
"Raise",
"an",
"exception",
"if",
"obj",
"is",
"not",
"an",
"instance",
"of",
"one",
"of",
"the",
"specified",
"types",
"."
] |
5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679
|
https://github.com/openvax/typechecks/blob/5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679/typechecks/__init__.py#L59-L82
|
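Usage example, again assuming the typechecks package is available; note how the name and the (possibly truncated) object repr end up in the message:

from typechecks import require_instance

require_instance(3.14, (int, float), name="threshold")       # ok
try:
    require_instance("3.14", (int, float), name="threshold")
except TypeError as error:
    print(error)
# threshold: expected one of <class 'int'>, <class 'float'>. Got: '3.14' of type '<class 'str'>'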
239,427
|
openvax/typechecks
|
typechecks/__init__.py
|
require_iterable_of
|
def require_iterable_of(objs, types, name=None, type_name=None, truncate_at=80):
"""
Raise an exception if objs is not an iterable with each element an instance
of one of the specified types.
See `require_instance` for descriptions of the other parameters.
"""
# Fast pass for common case where all types are correct.
# This avoids the more expensive loop below. A typical speedup from this
# optimization is 6.6 sec -> 1.7 sec, for testing a list of size 10,000,000.
try:
if all(isinstance(obj, types) for obj in objs):
return
except TypeError:
# We don't require that objs is a list in this function, just that it's
# iterable. We specify 'list' below as a convenient way to throw the
# desired error.
require_instance(objs, list, name, "iterable", truncate_at)
# Some type isn't correct. We reuse the require_instance function to raise
# the exception.
prefix = ("%s: " % name) if name else ""
for (i, obj) in enumerate(objs):
element_name = prefix + ("element at index %d" % i)
require_instance(obj, types, element_name, type_name, truncate_at)
assert False, "Shouldn't reach here."
|
python
|
def require_iterable_of(objs, types, name=None, type_name=None, truncate_at=80):
"""
Raise an exception if objs is not an iterable with each element an instance
of one of the specified types.
See `require_instance` for descriptions of the other parameters.
"""
# Fast pass for common case where all types are correct.
# This avoids the more expensive loop below. A typical speedup from this
# optimization is 6.6 sec -> 1.7 sec, for testing a list of size 10,000,000.
try:
if all(isinstance(obj, types) for obj in objs):
return
except TypeError:
# We don't require that objs is a list in this function, just that it's
# iterable. We specify 'list' below as a convenient way to throw the
# desired error.
require_instance(objs, list, name, "iterable", truncate_at)
# Some type isn't correct. We reuse the require_instance function to raise
# the exception.
prefix = ("%s: " % name) if name else ""
for (i, obj) in enumerate(objs):
element_name = prefix + ("element at index %d" % i)
require_instance(obj, types, element_name, type_name, truncate_at)
assert False, "Shouldn't reach here."
|
[
"def",
"require_iterable_of",
"(",
"objs",
",",
"types",
",",
"name",
"=",
"None",
",",
"type_name",
"=",
"None",
",",
"truncate_at",
"=",
"80",
")",
":",
"# Fast pass for common case where all types are correct.",
"# This avoids the more expensive loop below. A typical speedup from this",
"# optimization is 6.6 sec -> 1.7 sec, for testing a list of size 10,000,000.",
"try",
":",
"if",
"all",
"(",
"isinstance",
"(",
"obj",
",",
"types",
")",
"for",
"obj",
"in",
"objs",
")",
":",
"return",
"except",
"TypeError",
":",
"# We don't require that objs is a list in this function, just that it's",
"# iterable. We specify 'list' below as a convenient way to throw the",
"# desired error.",
"require_instance",
"(",
"objs",
",",
"list",
",",
"name",
",",
"\"iterable\"",
",",
"truncate_at",
")",
"# Some type isn't correct. We reuse the require_instance function to raise",
"# the exception.",
"prefix",
"=",
"(",
"\"%s: \"",
"%",
"name",
")",
"if",
"name",
"else",
"\"\"",
"for",
"(",
"i",
",",
"obj",
")",
"in",
"enumerate",
"(",
"objs",
")",
":",
"element_name",
"=",
"prefix",
"+",
"(",
"\"element at index %d\"",
"%",
"i",
")",
"require_instance",
"(",
"obj",
",",
"types",
",",
"element_name",
",",
"type_name",
",",
"truncate_at",
")",
"assert",
"False",
",",
"\"Shouldn't reach here.\""
] |
Raise an exception if objs is not an iterable with each element an instance
of one of the specified types.
See `require_instance` for descriptions of the other parameters.
|
[
"Raise",
"an",
"exception",
"if",
"objs",
"is",
"not",
"an",
"iterable",
"with",
"each",
"element",
"an",
"instance",
"of",
"one",
"of",
"the",
"specified",
"types",
"."
] |
5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679
|
https://github.com/openvax/typechecks/blob/5340b4e8a2f419b3a7aa816a5b19e2e0a6ce0679/typechecks/__init__.py#L84-L109
|
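Usage example showing both paths: the cheap all() check when every element matches, and the per-element loop that pinpoints the offending index when one does not (assuming typechecks is installed):

from typechecks import require_iterable_of

require_iterable_of([1, 2, 3], int, name="ids")          # fast path, no loop
try:
    require_iterable_of([1, "2", 3], int, name="ids")    # slow path names the bad element
except TypeError as error:
    print(error)
# ids: element at index 1: expected <class 'int'>. Got: '2' of type '<class 'str'>'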
239,428
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/reflect/RecursiveObjectWriter.py
|
RecursiveObjectWriter.set_property
|
def set_property(obj, name, value):
"""
Recursively sets value of object and its subobjects property specified by its name.
The object can be a user defined object, map or array.
        The property name correspondingly must be an object property, map key or array index.
        If the property does not exist or introspection fails,
        this method doesn't do anything and doesn't throw any errors.
:param obj: an object to write property to.
:param name: a name of the property to set.
:param value: a new value for the property to set.
"""
if obj == None or name == None:
return
names = name.split(".")
if names == None or len(names) == 0:
return
RecursiveObjectWriter._perform_set_property(obj, names, 0, value)
|
python
|
def set_property(obj, name, value):
"""
Recursively sets value of object and its subobjects property specified by its name.
The object can be a user defined object, map or array.
        The property name correspondingly must be an object property, map key or array index.
        If the property does not exist or introspection fails,
        this method doesn't do anything and doesn't throw any errors.
:param obj: an object to write property to.
:param name: a name of the property to set.
:param value: a new value for the property to set.
"""
if obj == None or name == None:
return
names = name.split(".")
if names == None or len(names) == 0:
return
RecursiveObjectWriter._perform_set_property(obj, names, 0, value)
|
[
"def",
"set_property",
"(",
"obj",
",",
"name",
",",
"value",
")",
":",
"if",
"obj",
"==",
"None",
"or",
"name",
"==",
"None",
":",
"return",
"names",
"=",
"name",
".",
"split",
"(",
"\".\"",
")",
"if",
"names",
"==",
"None",
"or",
"len",
"(",
"names",
")",
"==",
"0",
":",
"return",
"RecursiveObjectWriter",
".",
"_perform_set_property",
"(",
"obj",
",",
"names",
",",
"0",
",",
"value",
")"
] |
Recursively sets value of object and its subobjects property specified by its name.
The object can be a user defined object, map or array.
        The property name correspondingly must be an object property, map key or array index.
        If the property does not exist or introspection fails,
        this method doesn't do anything and doesn't throw any errors.
:param obj: an object to write property to.
:param name: a name of the property to set.
:param value: a new value for the property to set.
|
[
"Recursively",
"sets",
"value",
"of",
"object",
"and",
"its",
"subobjects",
"property",
"specified",
"by",
"its",
"name",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/reflect/RecursiveObjectWriter.py#L45-L68
|
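_perform_set_property is not shown in this record, so the snippet below is only a dict-based sketch of the dotted-path write that set_property performs; the real RecursiveObjectWriter also handles user-defined objects and array indices.

def set_property_sketch(obj, name, value):
    # Walk a dotted path through nested dicts, creating levels as needed.
    if obj is None or name is None:
        return
    names = name.split(".")
    for key in names[:-1]:
        obj = obj.setdefault(key, {})
    obj[names[-1]] = value

config = {}
set_property_sketch(config, "server.http.port", 8080)
print(config)    # {'server': {'http': {'port': 8080}}}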
239,429
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/reflect/RecursiveObjectWriter.py
|
RecursiveObjectWriter.copy_properties
|
def copy_properties(dest, src):
"""
Copies content of one object to another object
by recursively reading all properties from source object
and then recursively writing them to destination object.
:param dest: a destination object to write properties to.
:param src: a source object to read properties from
"""
if dest == None or src == None:
return
values = RecursiveObjectReader.get_properties(src)
RecursiveObjectWriter.set_properties(dest, values)
|
python
|
def copy_properties(dest, src):
"""
Copies content of one object to another object
by recursively reading all properties from source object
and then recursively writing them to destination object.
:param dest: a destination object to write properties to.
:param src: a source object to read properties from
"""
if dest == None or src == None:
return
values = RecursiveObjectReader.get_properties(src)
RecursiveObjectWriter.set_properties(dest, values)
|
[
"def",
"copy_properties",
"(",
"dest",
",",
"src",
")",
":",
"if",
"dest",
"==",
"None",
"or",
"src",
"==",
"None",
":",
"return",
"values",
"=",
"RecursiveObjectReader",
".",
"get_properties",
"(",
"src",
")",
"RecursiveObjectWriter",
".",
"set_properties",
"(",
"dest",
",",
"values",
")"
] |
Copies content of one object to another object
by recursively reading all properties from source object
and then recursively writing them to destination object.
:param dest: a destination object to write properties to.
:param src: a source object to read properties from
|
[
"Copies",
"content",
"of",
"one",
"object",
"to",
"another",
"object",
"by",
"recursively",
"reading",
"all",
"properties",
"from",
"source",
"object",
"and",
"then",
"recursively",
"writing",
"them",
"to",
"destination",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/reflect/RecursiveObjectWriter.py#L93-L107
|
239,430
|
zabertech/python-izaber
|
izaber/date.py
|
time_range_cutter_at_time
|
def time_range_cutter_at_time(local,time_range,time_cut=(0,0,0)):
""" Given a range, return a list of DateTimes that match the time_cut
between start and end.
:param local: if False [default] use UTC datetime. If True use localtz
:param time_range: the TimeRange object
:param time_cut: HH:MM:SS of when to cut. eg: (0,0,0) for midnight
"""
( start, end ) = time_range.get(local)
index = start.replace(
hour=time_cut[0],
minute=time_cut[1],
second=time_cut[2]
)
cuts = []
index += datetime.timedelta(days=1)
while index < end:
cuts.append(index)
index += datetime.timedelta(days=1)
if local:
index = time_range.normalize(index)
return cuts
|
python
|
def time_range_cutter_at_time(local,time_range,time_cut=(0,0,0)):
""" Given a range, return a list of DateTimes that match the time_cut
between start and end.
:param local: if False [default] use UTC datetime. If True use localtz
:param time_range: the TimeRange object
:param time_cut: HH:MM:SS of when to cut. eg: (0,0,0) for midnight
"""
( start, end ) = time_range.get(local)
index = start.replace(
hour=time_cut[0],
minute=time_cut[1],
second=time_cut[2]
)
cuts = []
index += datetime.timedelta(days=1)
while index < end:
cuts.append(index)
index += datetime.timedelta(days=1)
if local:
index = time_range.normalize(index)
return cuts
|
[
"def",
"time_range_cutter_at_time",
"(",
"local",
",",
"time_range",
",",
"time_cut",
"=",
"(",
"0",
",",
"0",
",",
"0",
")",
")",
":",
"(",
"start",
",",
"end",
")",
"=",
"time_range",
".",
"get",
"(",
"local",
")",
"index",
"=",
"start",
".",
"replace",
"(",
"hour",
"=",
"time_cut",
"[",
"0",
"]",
",",
"minute",
"=",
"time_cut",
"[",
"1",
"]",
",",
"second",
"=",
"time_cut",
"[",
"2",
"]",
")",
"cuts",
"=",
"[",
"]",
"index",
"+=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"while",
"index",
"<",
"end",
":",
"cuts",
".",
"append",
"(",
"index",
")",
"index",
"+=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"if",
"local",
":",
"index",
"=",
"time_range",
".",
"normalize",
"(",
"index",
")",
"return",
"cuts"
] |
Given a range, return a list of DateTimes that match the time_cut
between start and end.
:param local: if False [default] use UTC datetime. If True use localtz
:param time_range: the TimeRange object
:param time_cut: HH:MM:SS of when to cut. eg: (0,0,0) for midnight
|
[
"Given",
"a",
"range",
"return",
"a",
"list",
"of",
"DateTimes",
"that",
"match",
"the",
"time_cut",
"between",
"start",
"and",
"end",
"."
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L11-L34
|
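TimeRange and DateTime come from izaber.date and are not shown here, so this sketch reproduces the cut-point loop with plain datetime objects and skips the local-timezone normalisation branch:

import datetime

def midnight_cuts(start, end, time_cut=(0, 0, 0)):
    # Collect every datetime after 'start' and before 'end' that lands on time_cut.
    index = start.replace(hour=time_cut[0], minute=time_cut[1], second=time_cut[2])
    cuts = []
    index += datetime.timedelta(days=1)
    while index < end:
        cuts.append(index)
        index += datetime.timedelta(days=1)
    return cuts

start = datetime.datetime(2024, 1, 1, 15, 30)
end = datetime.datetime(2024, 1, 4, 9, 0)
print(midnight_cuts(start, end))    # midnights of Jan 2, Jan 3 and Jan 4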
239,431
|
zabertech/python-izaber
|
izaber/date.py
|
TimeRange.hours
|
def hours(self,local=False):
""" Returns the number of hours of difference
"""
delta = self.delta(local)
return delta.total_seconds()/3600
|
python
|
def hours(self,local=False):
""" Returns the number of hours of difference
"""
delta = self.delta(local)
return delta.total_seconds()/3600
|
[
"def",
"hours",
"(",
"self",
",",
"local",
"=",
"False",
")",
":",
"delta",
"=",
"self",
".",
"delta",
"(",
"local",
")",
"return",
"delta",
".",
"total_seconds",
"(",
")",
"/",
"3600"
] |
Returns the number of hours of difference
|
[
"Returns",
"the",
"number",
"of",
"hours",
"of",
"difference"
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L60-L64
|
239,432
|
zabertech/python-izaber
|
izaber/date.py
|
TimeRange.chunks
|
def chunks(self,local,cutter_callback=None,*cutter_callback_args,**cutter_callback_kwargs):
""" Takes a time range and returns sub timeranges based upon
the cuts that cutter callback provides
:param local: if False [default] use UTC datetime. If True use localtz
:param cutter_callback: This should be a callback function that
takes the current TimeRange and returns a list of
DateTimes that denote where the next chunk should
start. By default the cutter used will cut at 00:00:00
each day.
"""
# First we get all the slices we want to take out of this time range.
if not cutter_callback:
cutter_callback = time_range_cutter_at_time
time_cuts = cutter_callback(local,self,*cutter_callback_args,**cutter_callback_kwargs)
# Now we need to make the time range objects for the cuts
time_chunks = []
time_index = self.start_time
time_cuts = sorted(time_cuts)
for time_cut in time_cuts:
# FIXME: Better error message is probably going to be helpful
if self.end_time < time_cut or self.start_time > time_cut:
raise Exception('Cut time provided that is outside of time range')
# Create the new tail end entry for times, combine it with the
# index to craft a timerange and add it to the chunk list
cut_end_time = time_cut
# If the chunk is not the final chunk, we want to pull
# the time back by a small smidgen
if cut_end_time != self.end_time:
cut_end_time -= datetime.timedelta(microseconds=1)
time_ending = DateTime(
data=cut_end_time,
data_tz=time_index.local_tz,
local_tz=time_index.local_tz
)
chunk_range = TimeRange(time_index,time_ending)
time_chunks.append(chunk_range)
# Setup the index for the next cut (the current cut
# becomes the start of the next cut)
time_index = DateTime(
data=time_cut,
data_tz=time_index.local_tz,
local_tz=time_index.local_tz
)
# Add the last segment if required
if time_index != self.end_time:
time_chunks.append(
TimeRange(time_index,self.end_time)
)
return time_chunks
|
python
|
def chunks(self,local,cutter_callback=None,*cutter_callback_args,**cutter_callback_kwargs):
""" Takes a time range and returns sub timeranges based upon
the cuts that cutter callback provides
:param local: if False [default] use UTC datetime. If True use localtz
:param cutter_callback: This should be a callback function that
takes the current TimeRange and returns a list of
DateTimes that denote where the next chunk should
start. By default the cutter used will cut at 00:00:00
each day.
"""
# First we get all the slices we want to take out of this time range.
if not cutter_callback:
cutter_callback = time_range_cutter_at_time
time_cuts = cutter_callback(local,self,*cutter_callback_args,**cutter_callback_kwargs)
# Now we need to make the time range objects for the cuts
time_chunks = []
time_index = self.start_time
time_cuts = sorted(time_cuts)
for time_cut in time_cuts:
# FIXME: Better error message is probably going to be helpful
if self.end_time < time_cut or self.start_time > time_cut:
raise Exception('Cut time provided that is outside of time range')
# Create the new tail end entry for times, combine it with the
# index to craft a timerange and add it to the chunk list
cut_end_time = time_cut
# If the chunk is not the final chunk, we want to pull
# the time back by a small smidgen
if cut_end_time != self.end_time:
cut_end_time -= datetime.timedelta(microseconds=1)
time_ending = DateTime(
data=cut_end_time,
data_tz=time_index.local_tz,
local_tz=time_index.local_tz
)
chunk_range = TimeRange(time_index,time_ending)
time_chunks.append(chunk_range)
# Setup the index for the next cut (the current cut
# becomes the start of the next cut)
time_index = DateTime(
data=time_cut,
data_tz=time_index.local_tz,
local_tz=time_index.local_tz
)
# Add the last segment if required
if time_index != self.end_time:
time_chunks.append(
TimeRange(time_index,self.end_time)
)
return time_chunks
|
[
"def",
"chunks",
"(",
"self",
",",
"local",
",",
"cutter_callback",
"=",
"None",
",",
"*",
"cutter_callback_args",
",",
"*",
"*",
"cutter_callback_kwargs",
")",
":",
"# First we get all the slices we want to take out of this time range.",
"if",
"not",
"cutter_callback",
":",
"cutter_callback",
"=",
"time_range_cutter_at_time",
"time_cuts",
"=",
"cutter_callback",
"(",
"local",
",",
"self",
",",
"*",
"cutter_callback_args",
",",
"*",
"*",
"cutter_callback_kwargs",
")",
"# Now we need to make the time range objects for the cuts",
"time_chunks",
"=",
"[",
"]",
"time_index",
"=",
"self",
".",
"start_time",
"time_cuts",
"=",
"sorted",
"(",
"time_cuts",
")",
"for",
"time_cut",
"in",
"time_cuts",
":",
"# FIXME: Better error message is probably going to be helpful",
"if",
"self",
".",
"end_time",
"<",
"time_cut",
"or",
"self",
".",
"start_time",
">",
"time_cut",
":",
"raise",
"Exception",
"(",
"'Cut time provided that is outside of time range'",
")",
"# Create the new tail end entry for times, combine it with the",
"# index to craft a timerange and add it to the chunk list",
"cut_end_time",
"=",
"time_cut",
"# If the chunk is not the final chunk, we want to pull",
"# the time back by a small smidgen",
"if",
"cut_end_time",
"!=",
"self",
".",
"end_time",
":",
"cut_end_time",
"-=",
"datetime",
".",
"timedelta",
"(",
"microseconds",
"=",
"1",
")",
"time_ending",
"=",
"DateTime",
"(",
"data",
"=",
"cut_end_time",
",",
"data_tz",
"=",
"time_index",
".",
"local_tz",
",",
"local_tz",
"=",
"time_index",
".",
"local_tz",
")",
"chunk_range",
"=",
"TimeRange",
"(",
"time_index",
",",
"time_ending",
")",
"time_chunks",
".",
"append",
"(",
"chunk_range",
")",
"# Setup the index for the next cut (the current cut",
"# becomes the start of the next cut)",
"time_index",
"=",
"DateTime",
"(",
"data",
"=",
"time_cut",
",",
"data_tz",
"=",
"time_index",
".",
"local_tz",
",",
"local_tz",
"=",
"time_index",
".",
"local_tz",
")",
"# Add the last segment if required",
"if",
"time_index",
"!=",
"self",
".",
"end_time",
":",
"time_chunks",
".",
"append",
"(",
"TimeRange",
"(",
"time_index",
",",
"self",
".",
"end_time",
")",
")",
"return",
"time_chunks"
] |
Takes a time range and returns sub timeranges based upon
the cuts that cutter callback provides
:param local: if False [default] use UTC datetime. If True use localtz
:param cutter_callback: This should be a callback function that
takes the current TimeRange and returns a list of
DateTimes that denote where the next chunk should
start. By default the cutter used will cut at 00:00:00
each day.
|
[
"Takes",
"a",
"time",
"range",
"and",
"returns",
"sub",
"timeranges",
"based",
"upon",
"the",
"cuts",
"that",
"cutter",
"callback",
"provides"
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L87-L147
|
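The chunking logic is easier to see without the DateTime/TimeRange wrappers, so the sketch below works on plain (start, end) datetime tuples; it keeps the one-microsecond back-off on every chunk end except the last, but drops the timezone plumbing:

import datetime

def chunk_range(start, end, cuts):
    # Split [start, end] into sub-ranges at the given cut datetimes.
    chunks, index = [], start
    for cut in sorted(cuts):
        if cut < start or cut > end:
            raise Exception('Cut time provided that is outside of time range')
        chunk_end = cut if cut == end else cut - datetime.timedelta(microseconds=1)
        chunks.append((index, chunk_end))
        index = cut
    if index != end:
        chunks.append((index, end))
    return chunks

start = datetime.datetime(2024, 1, 1, 12, 0)
end = datetime.datetime(2024, 1, 3, 6, 0)
cuts = [datetime.datetime(2024, 1, 2), datetime.datetime(2024, 1, 3)]
for chunk_start, chunk_end in chunk_range(start, end, cuts):
    print(chunk_start, '->', chunk_end)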
239,433
|
zabertech/python-izaber
|
izaber/date.py
|
TimeRange.str
|
def str(self,local):
""" Return the string representation of the time range
:param local: if False [default] use UTC datetime. If True use localtz
"""
s = self.start_time.str(local) \
+ u" to " \
+ self.end_time.str(local)
return s
|
python
|
def str(self,local):
""" Return the string representation of the time range
:param local: if False [default] use UTC datetime. If True use localtz
"""
s = self.start_time.str(local) \
+ u" to " \
+ self.end_time.str(local)
return s
|
[
"def",
"str",
"(",
"self",
",",
"local",
")",
":",
"s",
"=",
"self",
".",
"start_time",
".",
"str",
"(",
"local",
")",
"+",
"u\" to \"",
"+",
"self",
".",
"end_time",
".",
"str",
"(",
"local",
")",
"return",
"s"
] |
Return the string representation of the time range
:param local: if False [default] use UTC datetime. If True use localtz
|
[
"Return",
"the",
"string",
"representation",
"of",
"the",
"time",
"range"
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L149-L157
|
239,434
|
zabertech/python-izaber
|
izaber/date.py
|
DateTime.daily_hours
|
def daily_hours(self,local=False):
""" This returns a number from 0 to 24 that describes the number
of hours passed in a day. This is very useful for hr.attendances
"""
data = self.get(local)
daily_hours = (data.hour +
data.minute / 60.0 +
data.second / 3600.0)
return round(daily_hours,2)
|
python
|
def daily_hours(self,local=False):
""" This returns a number from 0 to 24 that describes the number
of hours passed in a day. This is very useful for hr.attendances
"""
data = self.get(local)
daily_hours = (data.hour +
data.minute / 60.0 +
data.second / 3600.0)
return round(daily_hours,2)
|
[
"def",
"daily_hours",
"(",
"self",
",",
"local",
"=",
"False",
")",
":",
"data",
"=",
"self",
".",
"get",
"(",
"local",
")",
"daily_hours",
"=",
"(",
"data",
".",
"hour",
"+",
"data",
".",
"minute",
"/",
"60.0",
"+",
"data",
".",
"second",
"/",
"3600.0",
")",
"return",
"round",
"(",
"daily_hours",
",",
"2",
")"
] |
This returns a number from 0 to 24 that describes the number
of hours passed in a day. This is very useful for hr.attendances
|
[
"This",
"returns",
"a",
"number",
"from",
"0",
"to",
"24",
"that",
"describes",
"the",
"number",
"of",
"hours",
"passed",
"in",
"a",
"day",
".",
"This",
"is",
"very",
"useful",
"for",
"hr",
".",
"attendances"
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L340-L348
|
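A worked example of the arithmetic: 14:30:45 is 14 + 30/60 + 45/3600 = 14.5125 hours into the day, which rounds to 14.51.

hour, minute, second = 14, 30, 45
print(round(hour + minute / 60.0 + second / 3600.0, 2))    # 14.51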
239,435
|
zabertech/python-izaber
|
izaber/date.py
|
DateTime.str
|
def str(self,local=False,ifempty=None):
""" Returns the string representation of the datetime
"""
ts = self.get(local)
if not ts: return ifempty
return ts.strftime('%Y-%m-%d %H:%M:%S')
|
python
|
def str(self,local=False,ifempty=None):
""" Returns the string representation of the datetime
"""
ts = self.get(local)
if not ts: return ifempty
return ts.strftime('%Y-%m-%d %H:%M:%S')
|
[
"def",
"str",
"(",
"self",
",",
"local",
"=",
"False",
",",
"ifempty",
"=",
"None",
")",
":",
"ts",
"=",
"self",
".",
"get",
"(",
"local",
")",
"if",
"not",
"ts",
":",
"return",
"ifempty",
"return",
"ts",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
")"
] |
Returns the string representation of the datetime
|
[
"Returns",
"the",
"string",
"representation",
"of",
"the",
"datetime"
] |
729bf9ef637e084c8ab3cc16c34cf659d3a79ee4
|
https://github.com/zabertech/python-izaber/blob/729bf9ef637e084c8ab3cc16c34cf659d3a79ee4/izaber/date.py#L366-L371
|
239,436
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
gc_velocity_update
|
def gc_velocity_update(particle, social, state):
""" Guaranteed convergence velocity update.
Args:
particle: cipy.algorithms.pso.Particle: Particle to update the velocity
for.
social: cipy.algorithms.pso.Particle: The social best for the particle.
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
Returns:
numpy.ndarray: the calculated velocity.
"""
gbest = state.swarm[gbest_idx(state.swarm)].position
if not np.array_equal(gbest, particle.position):
return std_velocity(particle, social, state)
rho = state.params['rho']
inertia = state.params['inertia']
v_max = state.params['v_max']
size = particle.position.size
r2 = state.rng.uniform(0.0, 1.0, size)
velocity = __gc_velocity_equation__(inertia, rho, r2, particle, gbest)
return __clamp__(velocity, v_max)
|
python
|
def gc_velocity_update(particle, social, state):
""" Guaranteed convergence velocity update.
Args:
particle: cipy.algorithms.pso.Particle: Particle to update the velocity
for.
social: cipy.algorithms.pso.Particle: The social best for the particle.
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
Returns:
numpy.ndarray: the calculated velocity.
"""
gbest = state.swarm[gbest_idx(state.swarm)].position
if not np.array_equal(gbest, particle.position):
return std_velocity(particle, social, state)
rho = state.params['rho']
inertia = state.params['inertia']
v_max = state.params['v_max']
size = particle.position.size
r2 = state.rng.uniform(0.0, 1.0, size)
velocity = __gc_velocity_equation__(inertia, rho, r2, particle, gbest)
return __clamp__(velocity, v_max)
|
[
"def",
"gc_velocity_update",
"(",
"particle",
",",
"social",
",",
"state",
")",
":",
"gbest",
"=",
"state",
".",
"swarm",
"[",
"gbest_idx",
"(",
"state",
".",
"swarm",
")",
"]",
".",
"position",
"if",
"not",
"np",
".",
"array_equal",
"(",
"gbest",
",",
"particle",
".",
"position",
")",
":",
"return",
"std_velocity",
"(",
"particle",
",",
"social",
",",
"state",
")",
"rho",
"=",
"state",
".",
"params",
"[",
"'rho'",
"]",
"inertia",
"=",
"state",
".",
"params",
"[",
"'inertia'",
"]",
"v_max",
"=",
"state",
".",
"params",
"[",
"'v_max'",
"]",
"size",
"=",
"particle",
".",
"position",
".",
"size",
"r2",
"=",
"state",
".",
"rng",
".",
"uniform",
"(",
"0.0",
",",
"1.0",
",",
"size",
")",
"velocity",
"=",
"__gc_velocity_equation__",
"(",
"inertia",
",",
"rho",
",",
"r2",
",",
"particle",
",",
"gbest",
")",
"return",
"__clamp__",
"(",
"velocity",
",",
"v_max",
")"
] |
Guaranteed convergence velocity update.
Args:
particle: cipy.algorithms.pso.Particle: Particle to update the velocity
for.
social: cipy.algorithms.pso.Particle: The social best for the particle.
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
Returns:
numpy.ndarray: the calculated velocity.
|
[
"Guaranteed",
"convergence",
"velocity",
"update",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L88-L111
|
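__gc_velocity_equation__ and __clamp__ are internal helpers that are not shown in this record, so the guaranteed-convergence equation itself cannot be reproduced from it. The snippet below only illustrates the final clamping step, assuming __clamp__ bounds every component to plus or minus v_max:

import numpy as np

def clamp(velocity, v_max):
    # Assumed behaviour of __clamp__: keep each component in [-v_max, v_max].
    return np.clip(velocity, -v_max, v_max)

print(clamp(np.array([3.2, -7.5, 0.4]), 2.0))    # [ 2.  -2.   0.4]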
239,437
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
update_fitness
|
def update_fitness(objective_function, particle):
""" Calculates and updates the fitness and best_fitness of a particle.
Fitness is calculated using the 'problem.fitness' function.
Args:
problem: The optimization problem encapsulating the fitness function
and optimization type.
particle: cipy.algorithms.pso.Particle: Particle to update the fitness
for.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated fitness.
"""
fitness = objective_function(particle.position)
best_fitness = particle.best_fitness
cmp = comparator(fitness)
if best_fitness is None or cmp(fitness, best_fitness):
best_position = particle.position
return particle._replace(fitness=fitness,
best_fitness=fitness,
best_position=best_position)
else:
return particle._replace(fitness=fitness)
|
python
|
def update_fitness(objective_function, particle):
""" Calculates and updates the fitness and best_fitness of a particle.
Fitness is calculated using the 'problem.fitness' function.
Args:
problem: The optimization problem encapsulating the fitness function
and optimization type.
particle: cipy.algorithms.pso.Particle: Particle to update the fitness
for.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated fitness.
"""
fitness = objective_function(particle.position)
best_fitness = particle.best_fitness
cmp = comparator(fitness)
if best_fitness is None or cmp(fitness, best_fitness):
best_position = particle.position
return particle._replace(fitness=fitness,
best_fitness=fitness,
best_position=best_position)
else:
return particle._replace(fitness=fitness)
|
[
"def",
"update_fitness",
"(",
"objective_function",
",",
"particle",
")",
":",
"fitness",
"=",
"objective_function",
"(",
"particle",
".",
"position",
")",
"best_fitness",
"=",
"particle",
".",
"best_fitness",
"cmp",
"=",
"comparator",
"(",
"fitness",
")",
"if",
"best_fitness",
"is",
"None",
"or",
"cmp",
"(",
"fitness",
",",
"best_fitness",
")",
":",
"best_position",
"=",
"particle",
".",
"position",
"return",
"particle",
".",
"_replace",
"(",
"fitness",
"=",
"fitness",
",",
"best_fitness",
"=",
"fitness",
",",
"best_position",
"=",
"best_position",
")",
"else",
":",
"return",
"particle",
".",
"_replace",
"(",
"fitness",
"=",
"fitness",
")"
] |
Calculates and updates the fitness and best_fitness of a particle.
Fitness is calculated using the 'problem.fitness' function.
Args:
problem: The optimization problem encapsulating the fitness function
and optimization type.
particle: cipy.algorithms.pso.Particle: Particle to update the fitness
for.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated fitness.
|
[
"Calculates",
"and",
"updates",
"the",
"fitness",
"and",
"best_fitness",
"of",
"a",
"particle",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L141-L165
|
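Note that the docstring still describes a 'problem' argument, while the signature takes the objective function directly. The runnable sketch below uses a namedtuple Particle and assumes comparator() returns a minimisation test (strictly smaller is better), which is not shown in this record:

import operator
from collections import namedtuple

Particle = namedtuple('Particle',
                      ['position', 'fitness', 'best_fitness', 'best_position'])

def comparator(fitness):
    # Assumption: minimisation, so "better" means strictly smaller.
    return operator.lt

def sphere(position):
    return sum(x * x for x in position)

def update_fitness(objective_function, particle):
    fitness = objective_function(particle.position)
    cmp = comparator(fitness)
    if particle.best_fitness is None or cmp(fitness, particle.best_fitness):
        return particle._replace(fitness=fitness, best_fitness=fitness,
                                 best_position=particle.position)
    return particle._replace(fitness=fitness)

p = Particle(position=(1.0, 2.0), fitness=None, best_fitness=9.0, best_position=(3.0, 0.0))
print(update_fitness(sphere, p))    # best_fitness drops to 5.0, best_position follows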
239,438
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
update_particle
|
def update_particle(position_update, velocity_update, state, nbest_topology,
idx_particle):
""" Update function for a particle.
Calculates and updates the velocity and position of a particle for a
single iteration of the PSO algorithm. Social best particle is determined
by the state.params['topology'] function.
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
nbest_topology: dict: Containing neighbourhood best index for each
particle index.
idx_particle: tuple: Tuple of the index of the particle and the
particle itself.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated position
and velocity.
"""
(idx, particle) = idx_particle
nbest = state.swarm[nbest_topology[idx]].best_position
velocity = velocity_update(particle, nbest, state)
position = position_update(particle.position, velocity)
return particle._replace(position=position, velocity=velocity)
|
python
|
def update_particle(position_update, velocity_update, state, nbest_topology,
idx_particle):
""" Update function for a particle.
Calculates and updates the velocity and position of a particle for a
single iteration of the PSO algorithm. Social best particle is determined
by the state.params['topology'] function.
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
nbest_topology: dict: Containing neighbourhood best index for each
particle index.
idx_particle: tuple: Tuple of the index of the particle and the
particle itself.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated position
and velocity.
"""
(idx, particle) = idx_particle
nbest = state.swarm[nbest_topology[idx]].best_position
velocity = velocity_update(particle, nbest, state)
position = position_update(particle.position, velocity)
return particle._replace(position=position, velocity=velocity)
|
[
"def",
"update_particle",
"(",
"position_update",
",",
"velocity_update",
",",
"state",
",",
"nbest_topology",
",",
"idx_particle",
")",
":",
"(",
"idx",
",",
"particle",
")",
"=",
"idx_particle",
"nbest",
"=",
"state",
".",
"swarm",
"[",
"nbest_topology",
"[",
"idx",
"]",
"]",
".",
"best_position",
"velocity",
"=",
"velocity_update",
"(",
"particle",
",",
"nbest",
",",
"state",
")",
"position",
"=",
"position_update",
"(",
"particle",
".",
"position",
",",
"velocity",
")",
"return",
"particle",
".",
"_replace",
"(",
"position",
"=",
"position",
",",
"velocity",
"=",
"velocity",
")"
] |
Update function for a particle.
Calculates and updates the velocity and position of a particle for a
single iteration of the PSO algorithm. Social best particle is determined
by the state.params['topology'] function.
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
nbest_topology: dict: Containing neighbourhood best index for each
particle index.
idx_particle: tuple: Tuple of the index of the particle and the
particle itself.
Returns:
cipy.algorithms.pso.Particle: A new particle with the updated position
and velocity.
|
[
"Update",
"function",
"for",
"a",
"particle",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L168-L194
|
239,439
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
gbest_idx
|
def gbest_idx(swarm):
""" gbest Neighbourhood topology function.
Args:
swarm: list: The list of particles.
Returns:
int: The index of the gbest particle.
"""
best = 0
cmp = comparator(swarm[best].best_fitness)
for (idx, particle) in enumerate(swarm):
if cmp(particle.best_fitness, swarm[best].best_fitness):
best = idx
return best
|
python
|
def gbest_idx(swarm):
""" gbest Neighbourhood topology function.
Args:
swarm: list: The list of particles.
Returns:
int: The index of the gbest particle.
"""
best = 0
cmp = comparator(swarm[best].best_fitness)
for (idx, particle) in enumerate(swarm):
if cmp(particle.best_fitness, swarm[best].best_fitness):
best = idx
return best
|
[
"def",
"gbest_idx",
"(",
"swarm",
")",
":",
"best",
"=",
"0",
"cmp",
"=",
"comparator",
"(",
"swarm",
"[",
"best",
"]",
".",
"best_fitness",
")",
"for",
"(",
"idx",
",",
"particle",
")",
"in",
"enumerate",
"(",
"swarm",
")",
":",
"if",
"cmp",
"(",
"particle",
".",
"best_fitness",
",",
"swarm",
"[",
"best",
"]",
".",
"best_fitness",
")",
":",
"best",
"=",
"idx",
"return",
"best"
] |
gbest Neighbourhood topology function.
Args:
swarm: list: The list of particles.
Returns:
int: The index of the gbest particle.
|
[
"gbest",
"Neighbourhood",
"topology",
"function",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L202-L216
|
239,440
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
lbest_idx
|
def lbest_idx(state, idx):
""" lbest Neighbourhood topology function.
Neighbourhood size is determined by state.params['n_s'].
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
idx: int: index of the particle in the swarm.
Returns:
int: The index of the lbest particle.
"""
swarm = state.swarm
n_s = state.params['n_s']
cmp = comparator(swarm[0].best_fitness)
indices = __lbest_indices__(len(swarm), n_s, idx)
best = None
for i in indices:
if best is None or cmp(swarm[i].best_fitness, swarm[best].best_fitness):
best = i
return best
|
python
|
def lbest_idx(state, idx):
""" lbest Neighbourhood topology function.
Neighbourhood size is determined by state.params['n_s'].
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
idx: int: index of the particle in the swarm.
Returns:
int: The index of the lbest particle.
"""
swarm = state.swarm
n_s = state.params['n_s']
cmp = comparator(swarm[0].best_fitness)
indices = __lbest_indices__(len(swarm), n_s, idx)
best = None
for i in indices:
if best is None or cmp(swarm[i].best_fitness, swarm[best].best_fitness):
best = i
return best
|
[
"def",
"lbest_idx",
"(",
"state",
",",
"idx",
")",
":",
"swarm",
"=",
"state",
".",
"swarm",
"n_s",
"=",
"state",
".",
"params",
"[",
"'n_s'",
"]",
"cmp",
"=",
"comparator",
"(",
"swarm",
"[",
"0",
"]",
".",
"best_fitness",
")",
"indices",
"=",
"__lbest_indices__",
"(",
"len",
"(",
"swarm",
")",
",",
"n_s",
",",
"idx",
")",
"best",
"=",
"None",
"for",
"i",
"in",
"indices",
":",
"if",
"best",
"is",
"None",
"or",
"cmp",
"(",
"swarm",
"[",
"i",
"]",
".",
"best_fitness",
",",
"swarm",
"[",
"best",
"]",
".",
"best_fitness",
")",
":",
"best",
"=",
"i",
"return",
"best"
] |
lbest Neighbourhood topology function.
Neighbourhood size is determined by state.params['n_s'].
Args:
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
idx: int: index of the particle in the swarm.
Returns:
int: The index of the lbest particle.
|
[
"lbest",
"Neighbourhood",
"topology",
"function",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L223-L243
|
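__lbest_indices__ is not shown in this record; the sketch below assumes the usual ring topology, where each particle sees n_s neighbours centred on its own index with wrap-around at the ends:

def lbest_indices_sketch(swarm_size, n_s, idx):
    # Assumed ring neighbourhood of size n_s centred on idx, wrapping around.
    start = idx - n_s // 2
    return [(start + offset) % swarm_size for offset in range(n_s)]

print(lbest_indices_sketch(swarm_size=10, n_s=3, idx=0))    # [9, 0, 1]
print(lbest_indices_sketch(swarm_size=10, n_s=3, idx=4))    # [3, 4, 5]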
239,441
|
avanwyk/cipy
|
cipy/algorithms/pso/functions.py
|
solution
|
def solution(swarm):
""" Determines the global best particle in the swarm.
Args:
swarm: iterable: an iterable that yields all particles in the swarm.
Returns:
cipy.algorithms.pso.Particle: The best particle in the swarm when
comparing the best_fitness values of the particles.
"""
best = swarm[0]
cmp = comparator(best.best_fitness)
for particle in swarm:
if cmp(particle.best_fitness, best.best_fitness):
best = particle
return best
|
python
|
def solution(swarm):
""" Determines the global best particle in the swarm.
Args:
swarm: iterable: an iterable that yields all particles in the swarm.
Returns:
cipy.algorithms.pso.Particle: The best particle in the swarm when
comparing the best_fitness values of the particles.
"""
best = swarm[0]
cmp = comparator(best.best_fitness)
for particle in swarm:
if cmp(particle.best_fitness, best.best_fitness):
best = particle
return best
|
[
"def",
"solution",
"(",
"swarm",
")",
":",
"best",
"=",
"swarm",
"[",
"0",
"]",
"cmp",
"=",
"comparator",
"(",
"best",
".",
"best_fitness",
")",
"for",
"particle",
"in",
"swarm",
":",
"if",
"cmp",
"(",
"particle",
".",
"best_fitness",
",",
"best",
".",
"best_fitness",
")",
":",
"best",
"=",
"particle",
"return",
"best"
] |
Determines the global best particle in the swarm.
Args:
swarm: iterable: an iterable that yields all particles in the swarm.
Returns:
cipy.algorithms.pso.Particle: The best particle in the swarm when
comparing the best_fitness values of the particles.
|
[
"Determines",
"the",
"global",
"best",
"particle",
"in",
"the",
"swarm",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/cipy/algorithms/pso/functions.py#L288-L303
|
239,442
|
roboogle/gtkmvc3
|
gtkmvco/examples/converter/src/views/currencies.py
|
CurrenciesView.add_currency_view
|
def add_currency_view(self, select=False):
"""returns the newly added view"""
v = CurrencyView()
self.remove_currency_view()
self['hbox_top'].pack_end(v.get_top_widget())
v.light_name(select)
return v
|
python
|
def add_currency_view(self, select=False):
"""returns the newly added view"""
v = CurrencyView()
self.remove_currency_view()
self['hbox_top'].pack_end(v.get_top_widget())
v.light_name(select)
return v
|
[
"def",
"add_currency_view",
"(",
"self",
",",
"select",
"=",
"False",
")",
":",
"v",
"=",
"CurrencyView",
"(",
")",
"self",
".",
"remove_currency_view",
"(",
")",
"self",
"[",
"'hbox_top'",
"]",
".",
"pack_end",
"(",
"v",
".",
"get_top_widget",
"(",
")",
")",
"v",
".",
"light_name",
"(",
"select",
")",
"return",
"v"
] |
returns the newly added view
|
[
"returns",
"the",
"newly",
"added",
"view"
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/converter/src/views/currencies.py#L41-L47
|
239,443
|
mback2k/python-appengine-auth
|
social_appengine_auth/pipelines.py
|
associate_by_user_id
|
def associate_by_user_id(backend, details, response, user=None, *args, **kwargs):
"""
Associate current auth with a user with the same Google user_id in the DB.
"""
if user:
return None
user_id = response.get('id')
if user_id:
# Try to associate accounts registered with the same Google user_id.
for provider in ('google-appengine-oauth', 'google-appengine-oauth2'):
social = backend.strategy.storage.user.get_social_auth(provider, user_id)
if social:
user = social.user
if user:
return {'user': user}
|
python
|
def associate_by_user_id(backend, details, response, user=None, *args, **kwargs):
"""
Associate current auth with a user with the same Google user_id in the DB.
"""
if user:
return None
user_id = response.get('id')
if user_id:
# Try to associate accounts registered with the same Google user_id.
for provider in ('google-appengine-oauth', 'google-appengine-oauth2'):
social = backend.strategy.storage.user.get_social_auth(provider, user_id)
if social:
user = social.user
if user:
return {'user': user}
|
[
"def",
"associate_by_user_id",
"(",
"backend",
",",
"details",
",",
"response",
",",
"user",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"user",
":",
"return",
"None",
"user_id",
"=",
"response",
".",
"get",
"(",
"'id'",
")",
"if",
"user_id",
":",
"# Try to associate accounts registered with the same Google user_id.",
"for",
"provider",
"in",
"(",
"'google-appengine-oauth'",
",",
"'google-appengine-oauth2'",
")",
":",
"social",
"=",
"backend",
".",
"strategy",
".",
"storage",
".",
"user",
".",
"get_social_auth",
"(",
"provider",
",",
"user_id",
")",
"if",
"social",
":",
"user",
"=",
"social",
".",
"user",
"if",
"user",
":",
"return",
"{",
"'user'",
":",
"user",
"}"
] |
Associate current auth with a user with the same Google user_id in the DB.
|
[
"Associate",
"current",
"auth",
"with",
"a",
"user",
"with",
"the",
"same",
"Google",
"user_id",
"in",
"the",
"DB",
"."
] |
dd27a0c53c7bebe147f7a6e3606c67ec673ac4d6
|
https://github.com/mback2k/python-appengine-auth/blob/dd27a0c53c7bebe147f7a6e3606c67ec673ac4d6/social_appengine_auth/pipelines.py#L2-L17
|
239,444
|
twneale/hercules
|
hercules/decorators.py
|
memoize_methodcalls
|
def memoize_methodcalls(func, pickle=False, dumps=pickle.dumps):
'''Cache the results of the function for each input it gets called with.
'''
cache = func._memoize_cache = {}
@functools.wraps(func)
def memoizer(self, *args, **kwargs):
if pickle:
key = dumps((args, kwargs))
else:
key = args
if key not in cache:
cache[key] = func(self, *args, **kwargs)
return cache[key]
return memoizer
|
python
|
def memoize_methodcalls(func, pickle=False, dumps=pickle.dumps):
'''Cache the results of the function for each input it gets called with.
'''
cache = func._memoize_cache = {}
@functools.wraps(func)
def memoizer(self, *args, **kwargs):
if pickle:
key = dumps((args, kwargs))
else:
key = args
if key not in cache:
cache[key] = func(self, *args, **kwargs)
return cache[key]
return memoizer
|
[
"def",
"memoize_methodcalls",
"(",
"func",
",",
"pickle",
"=",
"False",
",",
"dumps",
"=",
"pickle",
".",
"dumps",
")",
":",
"cache",
"=",
"func",
".",
"_memoize_cache",
"=",
"{",
"}",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"memoizer",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"pickle",
":",
"key",
"=",
"dumps",
"(",
"(",
"args",
",",
"kwargs",
")",
")",
"else",
":",
"key",
"=",
"args",
"if",
"key",
"not",
"in",
"cache",
":",
"cache",
"[",
"key",
"]",
"=",
"func",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"cache",
"[",
"key",
"]",
"return",
"memoizer"
] |
Cache the results of the function for each input it gets called with.
|
[
"Cache",
"the",
"results",
"of",
"the",
"function",
"for",
"each",
"input",
"it",
"gets",
"called",
"with",
"."
] |
cd61582ef7e593093e9b28b56798df4203d1467a
|
https://github.com/twneale/hercules/blob/cd61582ef7e593093e9b28b56798df4203d1467a/hercules/decorators.py#L40-L53
|
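Usage example with the memoize_methodcalls decorator above in scope (e.g. imported from hercules.decorators). The cache key is built from the positional arguments only, so self and keyword arguments are ignored unless pickle=True, and the cache is shared across instances:

class Fetcher:
    call_count = 0

    @memoize_methodcalls
    def fetch(self, url):
        Fetcher.call_count += 1        # stands in for an expensive lookup
        return 'payload for %s' % url

f = Fetcher()
f.fetch('http://example.com')
f.fetch('http://example.com')          # second call is served from the cache
print(Fetcher.call_count)              # 1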
239,445
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/config/ConfigParams.py
|
ConfigParams.get_section
|
def get_section(self, section):
"""
Gets parameters from specific section stored in this ConfigMap.
The section name is removed from parameter keys.
:param section: name of the section to retrieve configuration parameters from.
:return: all configuration parameters that belong to the section named 'section'.
"""
result = ConfigParams()
prefix = section + "."
for (key, value) in self.items():
# Prevents exception on the next line
if len(key) < len(prefix):
continue
# Perform case sensitive match
key_prefix = key[: len(prefix)]
if key_prefix == prefix:
key = key[len(prefix): ]
result[key] = value
return result
|
python
|
def get_section(self, section):
"""
Gets parameters from specific section stored in this ConfigMap.
The section name is removed from parameter keys.
:param section: name of the section to retrieve configuration parameters from.
:return: all configuration parameters that belong to the section named 'section'.
"""
result = ConfigParams()
prefix = section + "."
for (key, value) in self.items():
# Prevents exception on the next line
if len(key) < len(prefix):
continue
# Perform case sensitive match
key_prefix = key[: len(prefix)]
if key_prefix == prefix:
key = key[len(prefix): ]
result[key] = value
return result
|
[
"def",
"get_section",
"(",
"self",
",",
"section",
")",
":",
"result",
"=",
"ConfigParams",
"(",
")",
"prefix",
"=",
"section",
"+",
"\".\"",
"for",
"(",
"key",
",",
"value",
")",
"in",
"self",
".",
"items",
"(",
")",
":",
"# Prevents exception on the next line",
"if",
"len",
"(",
"key",
")",
"<",
"len",
"(",
"prefix",
")",
":",
"continue",
"# Perform case sensitive match",
"key_prefix",
"=",
"key",
"[",
":",
"len",
"(",
"prefix",
")",
"]",
"if",
"key_prefix",
"==",
"prefix",
":",
"key",
"=",
"key",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"result",
"[",
"key",
"]",
"=",
"value",
"return",
"result"
] |
Gets parameters from specific section stored in this ConfigMap.
The section name is removed from parameter keys.
:param section: name of the section to retrieve configuration parameters from.
:return: all configuration parameters that belong to the section named 'section'.
|
[
"Gets",
"parameters",
"from",
"specific",
"section",
"stored",
"in",
"this",
"ConfigMap",
".",
"The",
"section",
"name",
"is",
"removed",
"from",
"parameter",
"keys",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/config/ConfigParams.py#L81-L104
|
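The prefix handling is easier to see against a plain dict, so the sketch below reproduces only the key filtering and stripping; the real ConfigParams is a StringValueMap subclass. add_section in the next record is essentially the inverse, prepending 'section.' to each incoming key.

def get_section_sketch(params, section):
    # Keep only keys under 'section.' and strip the prefix (case-sensitive match).
    prefix = section + "."
    return {key[len(prefix):]: value
            for key, value in params.items()
            if key.startswith(prefix)}

params = {"connection.host": "localhost", "connection.port": "8080",
          "credential.user": "admin"}
print(get_section_sketch(params, "connection"))    # {'host': 'localhost', 'port': '8080'}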
239,446
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/config/ConfigParams.py
|
ConfigParams.add_section
|
def add_section(self, section, section_params):
"""
Adds parameters into this ConfigParams under specified section.
Keys for the new parameters are appended with section dot prefix.
        :param section: name of the section under which to add the new parameters
:param section_params: new parameters to be added.
"""
if section == None:
raise Exception("Section name cannot be null")
section = "" if self._is_shadow_name(section) else section
if section_params == None or len(section_params) == 0:
return
for (key, value) in section_params.items():
key = "" if self._is_shadow_name(key) else key
if len(key) > 0 and len(section) > 0:
key = section + "." + key
elif len(key) == 0:
key = section
self[key] = value
|
python
|
def add_section(self, section, section_params):
"""
Adds parameters into this ConfigParams under specified section.
Keys for the new parameters are appended with section dot prefix.
        :param section: name of the section under which to add the new parameters
:param section_params: new parameters to be added.
"""
if section == None:
raise Exception("Section name cannot be null")
section = "" if self._is_shadow_name(section) else section
if section_params == None or len(section_params) == 0:
return
for (key, value) in section_params.items():
key = "" if self._is_shadow_name(key) else key
if len(key) > 0 and len(section) > 0:
key = section + "." + key
elif len(key) == 0:
key = section
self[key] = value
|
[
"def",
"add_section",
"(",
"self",
",",
"section",
",",
"section_params",
")",
":",
"if",
"section",
"==",
"None",
":",
"raise",
"Exception",
"(",
"\"Section name cannot be null\"",
")",
"section",
"=",
"\"\"",
"if",
"self",
".",
"_is_shadow_name",
"(",
"section",
")",
"else",
"section",
"if",
"section_params",
"==",
"None",
"or",
"len",
"(",
"section_params",
")",
"==",
"0",
":",
"return",
"for",
"(",
"key",
",",
"value",
")",
"in",
"section_params",
".",
"items",
"(",
")",
":",
"key",
"=",
"\"\"",
"if",
"self",
".",
"_is_shadow_name",
"(",
"key",
")",
"else",
"key",
"if",
"len",
"(",
"key",
")",
">",
"0",
"and",
"len",
"(",
"section",
")",
">",
"0",
":",
"key",
"=",
"section",
"+",
"\".\"",
"+",
"key",
"elif",
"len",
"(",
"key",
")",
"==",
"0",
":",
"key",
"=",
"section",
"self",
"[",
"key",
"]",
"=",
"value"
] |
Adds parameters into this ConfigParams under specified section.
Keys for the new parameters are appended with section dot prefix.
:param section: name of the section where add new parameters
:param section_params: new parameters to be added.
|
[
"Adds",
"parameters",
"into",
"this",
"ConfigParams",
"under",
"specified",
"section",
".",
"Keys",
"for",
"the",
"new",
"parameters",
"are",
"appended",
"with",
"section",
"dot",
"prefix",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/config/ConfigParams.py#L111-L136
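A self-contained sketch of the key-prefixing behaviour that add_section implements above, mirrored with a plain dict instead of the library class; the simplified helper and the store name are illustrative only and skip the shadow-name handling of the original.

# Plain-dict mirror of the add_section() logic shown above (not the library itself).
def add_section_mirror(store, section, section_params):
    for key, value in (section_params or {}).items():
        store[section + "." + key if key else section] = value

store = {}
add_section_mirror(store, "connection", {"host": "localhost", "port": 8080})
print(store)  # {'connection.host': 'localhost', 'connection.port': 8080}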
|
239,447
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/config/ConfigParams.py
|
ConfigParams.override
|
def override(self, config_params):
"""
Overrides parameters with new values from specified ConfigParams and returns a new ConfigParams object.
:param config_params: ConfigMap with parameters to override the current values.
:return: a new ConfigParams object.
"""
map = StringValueMap.from_maps(self, config_params)
return ConfigParams(map)
|
python
|
def override(self, config_params):
"""
Overrides parameters with new values from specified ConfigParams and returns a new ConfigParams object.
:param config_params: ConfigMap with parameters to override the current values.
:return: a new ConfigParams object.
"""
map = StringValueMap.from_maps(self, config_params)
return ConfigParams(map)
|
[
"def",
"override",
"(",
"self",
",",
"config_params",
")",
":",
"map",
"=",
"StringValueMap",
".",
"from_maps",
"(",
"self",
",",
"config_params",
")",
"return",
"ConfigParams",
"(",
"map",
")"
] |
Overrides parameters with new values from specified ConfigParams and returns a new ConfigParams object.
:param config_params: ConfigMap with parameters to override the current values.
:return: a new ConfigParams object.
|
[
"Overrides",
"parameters",
"with",
"new",
"values",
"from",
"specified",
"ConfigParams",
"and",
"returns",
"a",
"new",
"ConfigParams",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/config/ConfigParams.py#L139-L148
|
239,448
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/config/ConfigParams.py
|
ConfigParams.set_defaults
|
def set_defaults(self, default_config_params):
"""
Set default values from specified ConfigParams and returns a new ConfigParams object.
:param default_config_params: ConfigMap with default parameter values.
:return: a new ConfigParams object.
"""
map = StringValueMap.from_maps(default_config_params, self)
return ConfigParams(map)
|
python
|
def set_defaults(self, default_config_params):
"""
Set default values from specified ConfigParams and returns a new ConfigParams object.
:param default_config_params: ConfigMap with default parameter values.
:return: a new ConfigParams object.
"""
map = StringValueMap.from_maps(default_config_params, self)
return ConfigParams(map)
|
[
"def",
"set_defaults",
"(",
"self",
",",
"default_config_params",
")",
":",
"map",
"=",
"StringValueMap",
".",
"from_maps",
"(",
"default_config_params",
",",
"self",
")",
"return",
"ConfigParams",
"(",
"map",
")"
] |
Set default values from specified ConfigParams and returns a new ConfigParams object.
:param default_config_params: ConfigMap with default parameter values.
:return: a new ConfigParams object.
|
[
"Set",
"default",
"values",
"from",
"specified",
"ConfigParams",
"and",
"returns",
"a",
"new",
"ConfigParams",
"object",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/config/ConfigParams.py#L151-L160
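A plain-dict sketch of the precedence implied by override() and set_defaults() above; it assumes StringValueMap.from_maps lets later maps win, which the method names and argument order suggest but this record does not show.

current = {"host": "localhost", "port": "8080"}
incoming = {"port": "9090", "timeout": "30"}

# like override(): the incoming values win, so port becomes "9090"
overridden = {**current, **incoming}
# like set_defaults(): existing values win, so port stays "8080"
defaulted = {**incoming, **current}
print(overridden)
print(defaulted)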
|
239,449
|
steenzout/python-serialization-json
|
steenzout/serialization/json/__init__.py
|
serialize
|
def serialize(obj):
"""Serialize the given object into JSON.
Args:
obj: the object to be serialized.
Returns:
(str): JSON representation of the given object.
"""
LOGGER.debug('serialize(%s)', obj)
if isinstance(obj, datetime.date):
return simplejson.dumps(obj, default=encoders.as_date)
elif hasattr(obj, '__dict__'):
return simplejson.dumps(obj, default=encoders.as_object)
return simplejson.dumps(obj)
|
python
|
def serialize(obj):
"""Serialize the given object into JSON.
Args:
obj: the object to be serialized.
Returns:
(str): JSON representation of the given object.
"""
LOGGER.debug('serialize(%s)', obj)
if isinstance(obj, datetime.date):
return simplejson.dumps(obj, default=encoders.as_date)
elif hasattr(obj, '__dict__'):
return simplejson.dumps(obj, default=encoders.as_object)
return simplejson.dumps(obj)
|
[
"def",
"serialize",
"(",
"obj",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'serialize(%s)'",
",",
"obj",
")",
"if",
"isinstance",
"(",
"obj",
",",
"datetime",
".",
"date",
")",
":",
"return",
"simplejson",
".",
"dumps",
"(",
"obj",
",",
"default",
"=",
"encoders",
".",
"as_date",
")",
"elif",
"hasattr",
"(",
"obj",
",",
"'__dict__'",
")",
":",
"return",
"simplejson",
".",
"dumps",
"(",
"obj",
",",
"default",
"=",
"encoders",
".",
"as_object",
")",
"return",
"simplejson",
".",
"dumps",
"(",
"obj",
")"
] |
Serialize the given object into JSON.
Args:
obj: the object to be serialized.
Returns:
(str): JSON representation of the given object.
|
[
"Serialize",
"the",
"given",
"object",
"into",
"JSON",
"."
] |
583568e14cc02ba0bf711f56b8a0a3ad142c696d
|
https://github.com/steenzout/python-serialization-json/blob/583568e14cc02ba0bf711f56b8a0a3ad142c696d/steenzout/serialization/json/__init__.py#L30-L47
|
239,450
|
steenzout/python-serialization-json
|
steenzout/serialization/json/__init__.py
|
deserialize
|
def deserialize(json, cls=None):
"""Deserialize a JSON string into a Python object.
Args:
json (str): the JSON string.
cls (:py:class:`object`):
if the ``json`` is deserialized into a ``dict`` and
this argument is set,
the ``dict`` keys are passed as keyword arguments to the
given ``cls`` initializer.
Returns:
Python object representation of the given JSON string.
"""
LOGGER.debug('deserialize(%s)', json)
out = simplejson.loads(json)
if isinstance(out, dict) and cls is not None:
return cls(**out)
return out
|
python
|
def deserialize(json, cls=None):
"""Deserialize a JSON string into a Python object.
Args:
json (str): the JSON string.
cls (:py:class:`object`):
if the ``json`` is deserialized into a ``dict`` and
this argument is set,
the ``dict`` keys are passed as keyword arguments to the
given ``cls`` initializer.
Returns:
Python object representation of the given JSON string.
"""
LOGGER.debug('deserialize(%s)', json)
out = simplejson.loads(json)
if isinstance(out, dict) and cls is not None:
return cls(**out)
return out
|
[
"def",
"deserialize",
"(",
"json",
",",
"cls",
"=",
"None",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'deserialize(%s)'",
",",
"json",
")",
"out",
"=",
"simplejson",
".",
"loads",
"(",
"json",
")",
"if",
"isinstance",
"(",
"out",
",",
"dict",
")",
"and",
"cls",
"is",
"not",
"None",
":",
"return",
"cls",
"(",
"*",
"*",
"out",
")",
"return",
"out"
] |
Deserialize a JSON string into a Python object.
Args:
json (str): the JSON string.
cls (:py:class:`object`):
if the ``json`` is deserialized into a ``dict`` and
this argument is set,
the ``dict`` keys are passed as keyword arguments to the
given ``cls`` initializer.
Returns:
Python object representation of the given JSON string.
|
[
"Deserialize",
"a",
"JSON",
"string",
"into",
"a",
"Python",
"object",
"."
] |
583568e14cc02ba0bf711f56b8a0a3ad142c696d
|
https://github.com/steenzout/python-serialization-json/blob/583568e14cc02ba0bf711f56b8a0a3ad142c696d/steenzout/serialization/json/__init__.py#L50-L71
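A stdlib-only mirror of the round trip that serialize() and deserialize() above implement; it uses json instead of simplejson and a local default hook standing in for encoders.as_date / encoders.as_object, so it sketches the pattern rather than the package API.

import datetime
import json


class Point:
    def __init__(self, x, y):
        self.x = x
        self.y = y


def _as_object(obj):
    # stand-in for the encoders referenced above: dates become ISO strings,
    # everything else falls back to its __dict__
    if isinstance(obj, datetime.date):
        return obj.isoformat()
    return obj.__dict__


payload = json.dumps(Point(1, 2), default=_as_object)  # '{"x": 1, "y": 2}'
restored = Point(**json.loads(payload))                # mirrors deserialize(payload, cls=Point)
print(payload, restored.x, restored.y)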
|
239,451
|
zmiller91/aws-lambda-api-builder
|
api_builder/cloudformation.py
|
main
|
def main(stack_name, template, mustache_variables):
'Update or create stack'
template_data = _parse_template(template, mustache_variables)
params = {
'StackName': stack_name,
'TemplateBody': template_data
}
try:
if _stack_exists(stack_name):
print('Updating {}'.format(stack_name))
stack_result = cf.update_stack(
**params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'])
waiter = cf.get_waiter('stack_update_complete')
waiter.wait(StackName=stack_name)
else:
print('Creating {}'.format(stack_name))
stack_result = cf.create_stack(
**params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'])
try:
waiter = cf.get_waiter('stack_create_complete')
print("...waiting for stack to be ready...")
waiter.wait(StackName=stack_name)
except Exception as ex:
print(ex)
print("""
There was an error creating your stack. Please go to CloudFormation in your
AWS console, click on the stack you created, resolve any errors, delete the stack
and try again.
You are seeing this error because your stack failed to create, when stacks fail
to create they are put into a terminal ROLLBACK_COMPLETE state and the stack cannot
be recovered because they have no previous state to roll back to.
""")
exit(1)
except botocore.exceptions.ClientError as ex:
error_message = ex.response['Error']['Message']
if error_message == 'No updates are to be performed.':
print("No changes")
else:
raise
else:
print(json.dumps(
cf.describe_stacks(StackName=stack_result['StackId']),
indent=2,
default=json_serial
))
|
python
|
def main(stack_name, template, mustache_variables):
'Update or create stack'
template_data = _parse_template(template, mustache_variables)
params = {
'StackName': stack_name,
'TemplateBody': template_data
}
try:
if _stack_exists(stack_name):
print('Updating {}'.format(stack_name))
stack_result = cf.update_stack(
**params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'])
waiter = cf.get_waiter('stack_update_complete')
waiter.wait(StackName=stack_name)
else:
print('Creating {}'.format(stack_name))
stack_result = cf.create_stack(
**params,
Capabilities=['CAPABILITY_IAM', 'CAPABILITY_NAMED_IAM'])
try:
waiter = cf.get_waiter('stack_create_complete')
print("...waiting for stack to be ready...")
waiter.wait(StackName=stack_name)
except Exception as ex:
print(ex)
print("""
There was an error creating your stack. Please go to CloudFormation in your
AWS console, click on the stack you created, resolve any errors, delete the stack
and try again.
You are seeing this error because your stack failed to create, when stacks fail
to create they are put into a terminal ROLLBACK_COMPLETE state and the stack cannot
be recovered because they have no previous state to roll back to.
""")
exit(1)
except botocore.exceptions.ClientError as ex:
error_message = ex.response['Error']['Message']
if error_message == 'No updates are to be performed.':
print("No changes")
else:
raise
else:
print(json.dumps(
cf.describe_stacks(StackName=stack_result['StackId']),
indent=2,
default=json_serial
))
|
[
"def",
"main",
"(",
"stack_name",
",",
"template",
",",
"mustache_variables",
")",
":",
"template_data",
"=",
"_parse_template",
"(",
"template",
",",
"mustache_variables",
")",
"params",
"=",
"{",
"'StackName'",
":",
"stack_name",
",",
"'TemplateBody'",
":",
"template_data",
"}",
"try",
":",
"if",
"_stack_exists",
"(",
"stack_name",
")",
":",
"print",
"(",
"'Updating {}'",
".",
"format",
"(",
"stack_name",
")",
")",
"stack_result",
"=",
"cf",
".",
"update_stack",
"(",
"*",
"*",
"params",
",",
"Capabilities",
"=",
"[",
"'CAPABILITY_IAM'",
",",
"'CAPABILITY_NAMED_IAM'",
"]",
")",
"waiter",
"=",
"cf",
".",
"get_waiter",
"(",
"'stack_update_complete'",
")",
"waiter",
".",
"wait",
"(",
"StackName",
"=",
"stack_name",
")",
"else",
":",
"print",
"(",
"'Creating {}'",
".",
"format",
"(",
"stack_name",
")",
")",
"stack_result",
"=",
"cf",
".",
"create_stack",
"(",
"*",
"*",
"params",
",",
"Capabilities",
"=",
"[",
"'CAPABILITY_IAM'",
",",
"'CAPABILITY_NAMED_IAM'",
"]",
")",
"try",
":",
"waiter",
"=",
"cf",
".",
"get_waiter",
"(",
"'stack_create_complete'",
")",
"print",
"(",
"\"...waiting for stack to be ready...\"",
")",
"waiter",
".",
"wait",
"(",
"StackName",
"=",
"stack_name",
")",
"except",
"Exception",
"as",
"ex",
":",
"print",
"(",
"ex",
")",
"print",
"(",
"\"\"\"\n There was an error creating your stack. Please go to CloudFormation in your \n AWS console, click on the stack you created, resolve any errors, delete the stack\n and try again.\n\n You are seeing this error because your stack failed to create, when stacks fail\n to create they are put into a terminal ROLLBACK_COMPLETE state and the stack cannot\n be recovered because they have no previous state to roll back to.\n \"\"\"",
")",
"exit",
"(",
"1",
")",
"except",
"botocore",
".",
"exceptions",
".",
"ClientError",
"as",
"ex",
":",
"error_message",
"=",
"ex",
".",
"response",
"[",
"'Error'",
"]",
"[",
"'Message'",
"]",
"if",
"error_message",
"==",
"'No updates are to be performed.'",
":",
"print",
"(",
"\"No changes\"",
")",
"else",
":",
"raise",
"else",
":",
"print",
"(",
"json",
".",
"dumps",
"(",
"cf",
".",
"describe_stacks",
"(",
"StackName",
"=",
"stack_result",
"[",
"'StackId'",
"]",
")",
",",
"indent",
"=",
"2",
",",
"default",
"=",
"json_serial",
")",
")"
] |
Update or create stack
|
[
"Update",
"or",
"create",
"stack"
] |
86026b5c134faa33eaa1e1268e0206cb074e3285
|
https://github.com/zmiller91/aws-lambda-api-builder/blob/86026b5c134faa33eaa1e1268e0206cb074e3285/api_builder/cloudformation.py#L10-L60
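The describe_stacks() output above is dumped with default=json_serial, a helper that is referenced but not shown in this record; the sketch below is a common shape for such a helper and is an assumption, not the repository's actual code. CloudFormation responses contain datetime values that json.dumps cannot handle on its own.

import datetime
import json

def json_serial(obj):
    # hypothetical stand-in for the json_serial referenced above
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError("Type %s not serializable" % type(obj))

print(json.dumps({"CreationTime": datetime.datetime(2020, 1, 1)},
                 default=json_serial, indent=2))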
|
239,452
|
rvswift/EB
|
EB/builder/splitter/splitter_io.py
|
print_extended_help
|
def print_extended_help():
"""
Prints an extended help message.
"""
# initiate TextWrapper class, which will handle all of the string formatting
w = textwrap.TextWrapper()
w.expand_tabs = False
w.width=110
w.initial_indent = ' '
w.subsequent_indent = ' '
print('')
print(textwrap.fill("<split> Complete parameter list:", initial_indent=''))
print('')
cmd = "--input : (required) csv file to split into training and test sets"
print(w.fill(cmd))
cmd = "\t\tColumns should be as follows:"
print(w.fill(cmd))
print('')
cmd="\t\t id, status, receptor_1, receptor_2, ..., receptor_N"
print(w.fill(cmd))
cmd="\t\t CH44, 1, -9.7, -9.3, ..., -10.2"
print(w.fill(cmd))
cmd="\t\t ZN44, 0, -6.6, -6.1, ..., -6.8"
print(w.fill(cmd))
print('')
cmd="\t\tid is a unique molecular identifier"
print(w.fill(cmd))
cmd="\t\tstatus takes a value of '1' if the molecule is active and '0' otherwise."
print(w.fill(cmd))
cmd="\t\treceptor_1 through receptor_N are docking scores."
print(w.fill(cmd))
print('')
tfrac = "--training_fraction : (optional) The fraction of input active molecules\
allocated to the training set, e.g. 0.40. Defaults to allocate half to the training\
set."
print(w.fill(tfrac))
print('')
d2a = "--decoy_to_active : (optional) The decoy to active ratio to establish in the \
training and validation sets. Defaults to maintain the input file ratio."
print(w.fill(d2a))
print('')
|
python
|
def print_extended_help():
"""
Prints an extended help message.
"""
# initiate TextWrapper class, which will handle all of the string formatting
w = textwrap.TextWrapper()
w.expand_tabs = False
w.width=110
w.initial_indent = ' '
w.subsequent_indent = ' '
print('')
print(textwrap.fill("<split> Complete parameter list:", initial_indent=''))
print('')
cmd = "--input : (required) csv file to split into training and test sets"
print(w.fill(cmd))
cmd = "\t\tColumns should be as follows:"
print(w.fill(cmd))
print('')
cmd="\t\t id, status, receptor_1, receptor_2, ..., receptor_N"
print(w.fill(cmd))
cmd="\t\t CH44, 1, -9.7, -9.3, ..., -10.2"
print(w.fill(cmd))
cmd="\t\t ZN44, 0, -6.6, -6.1, ..., -6.8"
print(w.fill(cmd))
print('')
cmd="\t\tid is a unique molecular identifier"
print(w.fill(cmd))
cmd="\t\tstatus takes a value of '1' if the molecule is active and '0' otherwise."
print(w.fill(cmd))
cmd="\t\treceptor_1 through receptor_N are docking scores."
print(w.fill(cmd))
print('')
tfrac = "--training_fraction : (optional) The fraction of input active molecules\
allocated to the training set, e.g. 0.40. Defaults to allocate half to the training\
set."
print(w.fill(tfrac))
print('')
d2a = "--decoy_to_active : (optional) The decoy to active ratio to establish in the \
training and validation sets. Defaults to maintain the input file ratio."
print(w.fill(d2a))
print('')
|
[
"def",
"print_extended_help",
"(",
")",
":",
"# initiate TextWrapper class, which will handle all of the string formatting",
"w",
"=",
"textwrap",
".",
"TextWrapper",
"(",
")",
"w",
".",
"expand_tabs",
"=",
"False",
"w",
".",
"width",
"=",
"110",
"w",
".",
"initial_indent",
"=",
"' '",
"w",
".",
"subsequent_indent",
"=",
"' '",
"print",
"(",
"''",
")",
"print",
"(",
"textwrap",
".",
"fill",
"(",
"\"<split> Complete parameter list:\"",
",",
"initial_indent",
"=",
"''",
")",
")",
"print",
"(",
"''",
")",
"cmd",
"=",
"\"--input : (required) csv file to split into training and test sets\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"cmd",
"=",
"\"\\t\\tColumns should be as follows:\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"print",
"(",
"''",
")",
"cmd",
"=",
"\"\\t\\t id, status, receptor_1, receptor_2, ..., receptor_N\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"cmd",
"=",
"\"\\t\\t CH44, 1, -9.7, -9.3, ..., -10.2\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"cmd",
"=",
"\"\\t\\t ZN44, 0, -6.6, -6.1, ..., -6.8\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"print",
"(",
"''",
")",
"cmd",
"=",
"\"\\t\\tid is a unique molecular identifier\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"cmd",
"=",
"\"\\t\\tstatus takes a value of '1' if the molecule is active and '0' otherwise.\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"cmd",
"=",
"\"\\t\\treceptor_1 through receptor_N are docking scores.\"",
"print",
"(",
"w",
".",
"fill",
"(",
"cmd",
")",
")",
"print",
"(",
"''",
")",
"tfrac",
"=",
"\"--training_fraction : (optional) The fraction of input active molecules\\\n\tallocated to the training set, e.g. 0.40. Defaults to allocate half to the training\\\n\tset.\"",
"print",
"(",
"w",
".",
"fill",
"(",
"tfrac",
")",
")",
"print",
"(",
"''",
")",
"d2a",
"=",
"\"--decoy_to_active : (optional) The decoy to active ratio to establish in the \\\n\ttraining and validation sets. Defaults to maintain the input file ratio.\"",
"print",
"(",
"w",
".",
"fill",
"(",
"d2a",
")",
")",
"print",
"(",
"''",
")"
] |
Prints an extended help message.
|
[
"Prints",
"an",
"extended",
"help",
"message",
"."
] |
341880b79faf8147dc9fa6e90438531cd09fabcc
|
https://github.com/rvswift/EB/blob/341880b79faf8147dc9fa6e90438531cd09fabcc/EB/builder/splitter/splitter_io.py#L26-L70
|
239,453
|
rvswift/EB
|
EB/builder/splitter/splitter_io.py
|
ParseArgs.get_float
|
def get_float(self, input_string):
"""
Return float type user input
"""
if input_string == '--training_fraction':
# was the flag set?
try:
index = self.args.index(input_string) + 1
except ValueError:
# it wasn't, it's optional, so return the appropriate default
return None
# the flag was set, so check if a value was set, otherwise exit
try:
if self.args[index] in self.flags:
print("\n {flag} was set but a value was not specified".format(flag=input_string))
print_short_help()
sys.exit(1)
except IndexError:
print("\n {flag} was set but a value was not specified".format(flag=input_string))
print_short_help()
sys.exit(1)
            # a value was set, so check if it's the correct type
try:
value = float(self.args[index])
except ValueError:
print("\n {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
print_short_help()
sys.exit(1)
if value > 1.0 or value < 0:
print("\n {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
print_short_help()
sys.exit(1)
# everything checks out, so return the appropriate value
return value
|
python
|
def get_float(self, input_string):
"""
Return float type user input
"""
if input_string == '--training_fraction':
# was the flag set?
try:
index = self.args.index(input_string) + 1
except ValueError:
# it wasn't, it's optional, so return the appropriate default
return None
# the flag was set, so check if a value was set, otherwise exit
try:
if self.args[index] in self.flags:
print("\n {flag} was set but a value was not specified".format(flag=input_string))
print_short_help()
sys.exit(1)
except IndexError:
print("\n {flag} was set but a value was not specified".format(flag=input_string))
print_short_help()
sys.exit(1)
            # a value was set, so check if it's the correct type
try:
value = float(self.args[index])
except ValueError:
print("\n {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
print_short_help()
sys.exit(1)
if value > 1.0 or value < 0:
print("\n {flag} must be a float less than or equal to 1, e.g. 0.4".format(flag=input_string))
print_short_help()
sys.exit(1)
# everything checks out, so return the appropriate value
return value
|
[
"def",
"get_float",
"(",
"self",
",",
"input_string",
")",
":",
"if",
"input_string",
"==",
"'--training_fraction'",
":",
"# was the flag set?",
"try",
":",
"index",
"=",
"self",
".",
"args",
".",
"index",
"(",
"input_string",
")",
"+",
"1",
"except",
"ValueError",
":",
"# it wasn't, it's optional, so return the appropriate default",
"return",
"None",
"# the flag was set, so check if a value was set, otherwise exit",
"try",
":",
"if",
"self",
".",
"args",
"[",
"index",
"]",
"in",
"self",
".",
"flags",
":",
"print",
"(",
"\"\\n {flag} was set but a value was not specified\"",
".",
"format",
"(",
"flag",
"=",
"input_string",
")",
")",
"print_short_help",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"except",
"IndexError",
":",
"print",
"(",
"\"\\n {flag} was set but a value was not specified\"",
".",
"format",
"(",
"flag",
"=",
"input_string",
")",
")",
"print_short_help",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# a value was set, so check if its the correct type",
"try",
":",
"value",
"=",
"float",
"(",
"self",
".",
"args",
"[",
"index",
"]",
")",
"except",
"ValueError",
":",
"print",
"(",
"\"\\n {flag} must be a float less than or equal to 1, e.g. 0.4\"",
".",
"format",
"(",
"flag",
"=",
"input_string",
")",
")",
"print_short_help",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"value",
">",
"1.0",
"or",
"value",
"<",
"0",
":",
"print",
"(",
"\"\\n {flag} must be a float less than or equal to 1, e.g. 0.4\"",
".",
"format",
"(",
"flag",
"=",
"input_string",
")",
")",
"print_short_help",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# everything checks out, so return the appropriate value",
"return",
"value"
] |
Return float type user input
|
[
"Return",
"float",
"type",
"user",
"input"
] |
341880b79faf8147dc9fa6e90438531cd09fabcc
|
https://github.com/rvswift/EB/blob/341880b79faf8147dc9fa6e90438531cd09fabcc/EB/builder/splitter/splitter_io.py#L130-L168
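A self-contained sketch of the manual flag scanning that get_float() performs above, using a plain argument list in place of self.args and self.flags; the names here are illustrative stand-ins.

import sys

args = ['--input', 'scores.csv', '--training_fraction', '0.4']
flags = {'--input', '--training_fraction', '--decoy_to_active'}

try:
    index = args.index('--training_fraction') + 1
    # another flag in the value slot means no value was given
    value = None if args[index] in flags else float(args[index])
except (ValueError, IndexError):
    value = None  # flag absent, value missing, or not parseable as a float

if value is not None and not 0 <= value <= 1.0:
    sys.exit(1)
print(value)  # 0.4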
|
239,454
|
samuel-phan/mssh-copy-id
|
msshcopyid/utils.py
|
get_password
|
def get_password(from_stdin_only=False):
"""
Get a password either from STDIN or by prompting the user.
:return: the password.
"""
if not sys.stdin.isatty():
password = sys.stdin.readline().strip()
elif not from_stdin_only:
password = getpass.getpass('Enter the password: ')
else:
password = None
return password
|
python
|
def get_password(from_stdin_only=False):
"""
Get a password either from STDIN or by prompting the user.
:return: the password.
"""
if not sys.stdin.isatty():
password = sys.stdin.readline().strip()
elif not from_stdin_only:
password = getpass.getpass('Enter the password: ')
else:
password = None
return password
|
[
"def",
"get_password",
"(",
"from_stdin_only",
"=",
"False",
")",
":",
"if",
"not",
"sys",
".",
"stdin",
".",
"isatty",
"(",
")",
":",
"password",
"=",
"sys",
".",
"stdin",
".",
"readline",
"(",
")",
".",
"strip",
"(",
")",
"elif",
"not",
"from_stdin_only",
":",
"password",
"=",
"getpass",
".",
"getpass",
"(",
"'Enter the password: '",
")",
"else",
":",
"password",
"=",
"None",
"return",
"password"
] |
Get a password either from STDIN or by prompting the user.
:return: the password.
|
[
"Get",
"a",
"password",
"either",
"from",
"STDIN",
"or",
"by",
"prompting",
"the",
"user",
"."
] |
59c50eabb74c4e0eeb729266df57c285e6661b0b
|
https://github.com/samuel-phan/mssh-copy-id/blob/59c50eabb74c4e0eeb729266df57c285e6661b0b/msshcopyid/utils.py#L14-L27
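A usage sketch for get_password() above; the import path follows the file location in this record (msshcopyid/utils.py) and is an assumption about how the module is packaged.

# echo "s3cret" | python read_pw.py   -> reads the piped line, no prompt
# python read_pw.py                   -> prompts interactively on a TTY
from msshcopyid.utils import get_password  # import path assumed from the record's file location

password = get_password()
# get_password(from_stdin_only=True) would return None instead of prompting on a TTY
print('got a password' if password else 'no password')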
|
239,455
|
roboogle/gtkmvc3
|
gtkmvco/examples/niew/view.py
|
_Abstract.connect_signals
|
def connect_signals(self, target):
"""
This is deprecated. Pass your controller to connect signals the old
way.
"""
if self.connected:
raise RuntimeError("GtkBuilder can only connect signals once")
self.builder.connect_signals(target)
self.connected = True
|
python
|
def connect_signals(self, target):
"""
This is deprecated. Pass your controller to connect signals the old
way.
"""
if self.connected:
raise RuntimeError("GtkBuilder can only connect signals once")
self.builder.connect_signals(target)
self.connected = True
|
[
"def",
"connect_signals",
"(",
"self",
",",
"target",
")",
":",
"if",
"self",
".",
"connected",
":",
"raise",
"RuntimeError",
"(",
"\"GtkBuilder can only connect signals once\"",
")",
"self",
".",
"builder",
".",
"connect_signals",
"(",
"target",
")",
"self",
".",
"connected",
"=",
"True"
] |
This is deprecated. Pass your controller to connect signals the old
way.
|
[
"This",
"is",
"deprecated",
".",
"Pass",
"your",
"controller",
"to",
"connect",
"signals",
"the",
"old",
"way",
"."
] |
63405fd8d2056be26af49103b13a8d5e57fe4dff
|
https://github.com/roboogle/gtkmvc3/blob/63405fd8d2056be26af49103b13a8d5e57fe4dff/gtkmvco/examples/niew/view.py#L71-L79
|
239,456
|
siemens/django-dingos
|
dingos/filter.py
|
create_order_keyword_list
|
def create_order_keyword_list(keywords):
"""
Takes a given keyword list and returns a ready-to-go
list of possible ordering values.
Example: ['foo'] returns [('foo', ''), ('-foo', '')]
"""
result = []
for keyword in keywords:
result.append((keyword, ''))
result.append(('-%s' % keyword, ''))
return result
|
python
|
def create_order_keyword_list(keywords):
"""
Takes a given keyword list and returns a ready-to-go
list of possible ordering values.
Example: ['foo'] returns [('foo', ''), ('-foo', '')]
"""
result = []
for keyword in keywords:
result.append((keyword, ''))
result.append(('-%s' % keyword, ''))
return result
|
[
"def",
"create_order_keyword_list",
"(",
"keywords",
")",
":",
"result",
"=",
"[",
"]",
"for",
"keyword",
"in",
"keywords",
":",
"result",
".",
"append",
"(",
"(",
"keyword",
",",
"''",
")",
")",
"result",
".",
"append",
"(",
"(",
"'-%s'",
"%",
"keyword",
",",
"''",
")",
")",
"return",
"result"
] |
Takes a given keyword list and returns a ready-to-go
list of possible ordering values.
Example: ['foo'] returns [('foo', ''), ('-foo', '')]
|
[
"Takes",
"a",
"given",
"keyword",
"list",
"and",
"returns",
"a",
"ready",
"-",
"to",
"-",
"go",
"list",
"of",
"possible",
"ordering",
"values",
"."
] |
7154f75b06d2538568e2f2455a76f3d0db0b7d70
|
https://github.com/siemens/django-dingos/blob/7154f75b06d2538568e2f2455a76f3d0db0b7d70/dingos/filter.py#L36-L47
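A quick check of the expansion performed by create_order_keyword_list() above; the function body is copied from the record so the snippet runs standalone.

def create_order_keyword_list(keywords):  # copied verbatim from the record above
    result = []
    for keyword in keywords:
        result.append((keyword, ''))
        result.append(('-%s' % keyword, ''))
    return result

print(create_order_keyword_list(['title', 'created']))
# [('title', ''), ('-title', ''), ('created', ''), ('-created', '')]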
|
239,457
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
loads
|
def loads(s: str, **kwargs) -> JsonObj:
""" Convert a json_str into a JsonObj
:param s: a str instance containing a JSON document
:param kwargs: arguments see: json.load for details
:return: JsonObj representing the json string
"""
if isinstance(s, (bytes, bytearray)):
s = s.decode(json.detect_encoding(s), 'surrogatepass')
return json.loads(s, object_hook=lambda pairs: JsonObj(**pairs), **kwargs)
|
python
|
def loads(s: str, **kwargs) -> JsonObj:
""" Convert a json_str into a JsonObj
:param s: a str instance containing a JSON document
:param kwargs: arguments see: json.load for details
:return: JsonObj representing the json string
"""
if isinstance(s, (bytes, bytearray)):
s = s.decode(json.detect_encoding(s), 'surrogatepass')
return json.loads(s, object_hook=lambda pairs: JsonObj(**pairs), **kwargs)
|
[
"def",
"loads",
"(",
"s",
":",
"str",
",",
"*",
"*",
"kwargs",
")",
"->",
"JsonObj",
":",
"if",
"isinstance",
"(",
"s",
",",
"(",
"bytes",
",",
"bytearray",
")",
")",
":",
"s",
"=",
"s",
".",
"decode",
"(",
"json",
".",
"detect_encoding",
"(",
"s",
")",
",",
"'surrogatepass'",
")",
"return",
"json",
".",
"loads",
"(",
"s",
",",
"object_hook",
"=",
"lambda",
"pairs",
":",
"JsonObj",
"(",
"*",
"*",
"pairs",
")",
",",
"*",
"*",
"kwargs",
")"
] |
Convert a json_str into a JsonObj
:param s: a str instance containing a JSON document
:param kwargs: arguments see: json.load for details
:return: JsonObj representing the json string
|
[
"Convert",
"a",
"json_str",
"into",
"a",
"JsonObj"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L95-L104
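A usage sketch for loads() above; the "from jsonasobj import loads" statement and the attribute-style access are assumptions based on the package layout in this record and on the object_hook that builds JsonObj(**pairs).

from jsonasobj import loads  # import path assumed from the package layout

doc = loads('{"name": "Grace", "langs": ["COBOL", "FLOW-MATIC"]}')
# the object_hook builds JsonObj(**pairs), so members should be reachable as attributes
print(doc.name, doc.langs[1])
# _as_dict (shown later in this file) appears to be a property, given how __as_list uses it
print(doc._as_dict)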
|
239,458
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
load
|
def load(source, **kwargs) -> JsonObj:
""" Deserialize a JSON source.
:param source: a URI, File name or a .read()-supporting file-like object containing a JSON document
:param kwargs: arguments. see: json.load for details
:return: JsonObj representing fp
"""
if isinstance(source, str):
if '://' in source:
req = Request(source)
req.add_header("Accept", "application/json, text/json;q=0.9")
with urlopen(req) as response:
jsons = response.read()
else:
with open(source) as f:
jsons = f.read()
elif hasattr(source, "read"):
jsons = source.read()
else:
raise TypeError("Unexpected type {} for source {}".format(type(source), source))
return loads(jsons, **kwargs)
|
python
|
def load(source, **kwargs) -> JsonObj:
""" Deserialize a JSON source.
:param source: a URI, File name or a .read()-supporting file-like object containing a JSON document
:param kwargs: arguments. see: json.load for details
:return: JsonObj representing fp
"""
if isinstance(source, str):
if '://' in source:
req = Request(source)
req.add_header("Accept", "application/json, text/json;q=0.9")
with urlopen(req) as response:
jsons = response.read()
else:
with open(source) as f:
jsons = f.read()
elif hasattr(source, "read"):
jsons = source.read()
else:
raise TypeError("Unexpected type {} for source {}".format(type(source), source))
return loads(jsons, **kwargs)
|
[
"def",
"load",
"(",
"source",
",",
"*",
"*",
"kwargs",
")",
"->",
"JsonObj",
":",
"if",
"isinstance",
"(",
"source",
",",
"str",
")",
":",
"if",
"'://'",
"in",
"source",
":",
"req",
"=",
"Request",
"(",
"source",
")",
"req",
".",
"add_header",
"(",
"\"Accept\"",
",",
"\"application/json, text/json;q=0.9\"",
")",
"with",
"urlopen",
"(",
"req",
")",
"as",
"response",
":",
"jsons",
"=",
"response",
".",
"read",
"(",
")",
"else",
":",
"with",
"open",
"(",
"source",
")",
"as",
"f",
":",
"jsons",
"=",
"f",
".",
"read",
"(",
")",
"elif",
"hasattr",
"(",
"source",
",",
"\"read\"",
")",
":",
"jsons",
"=",
"source",
".",
"read",
"(",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"Unexpected type {} for source {}\"",
".",
"format",
"(",
"type",
"(",
"source",
")",
",",
"source",
")",
")",
"return",
"loads",
"(",
"jsons",
",",
"*",
"*",
"kwargs",
")"
] |
Deserialize a JSON source.
:param source: a URI, File name or a .read()-supporting file-like object containing a JSON document
:param kwargs: arguments. see: json.load for details
:return: JsonObj representing fp
|
[
"Deserialize",
"a",
"JSON",
"source",
"."
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L107-L128
|
239,459
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
as_json
|
def as_json(obj: JsonObj, indent: Optional[str]=' ', **kwargs) -> str:
""" Convert obj to json string representation.
:param obj: pseudo 'self'
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
"""
return obj._as_json_dumps(indent, **kwargs)
|
python
|
def as_json(obj: JsonObj, indent: Optional[str]=' ', **kwargs) -> str:
""" Convert obj to json string representation.
:param obj: pseudo 'self'
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
"""
return obj._as_json_dumps(indent, **kwargs)
|
[
"def",
"as_json",
"(",
"obj",
":",
"JsonObj",
",",
"indent",
":",
"Optional",
"[",
"str",
"]",
"=",
"' '",
",",
"*",
"*",
"kwargs",
")",
"->",
"str",
":",
"return",
"obj",
".",
"_as_json_dumps",
"(",
"indent",
",",
"*",
"*",
"kwargs",
")"
] |
Convert obj to json string representation.
:param obj: pseudo 'self'
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
|
[
"Convert",
"obj",
"to",
"json",
"string",
"representation",
"."
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L148-L156
|
239,460
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
get
|
def get(obj: JsonObj, item: str, default: JsonObjTypes=None) -> JsonObjTypes:
""" Dictionary get routine """
return obj._get(item, default)
|
python
|
def get(obj: JsonObj, item: str, default: JsonObjTypes=None) -> JsonObjTypes:
""" Dictionary get routine """
return obj._get(item, default)
|
[
"def",
"get",
"(",
"obj",
":",
"JsonObj",
",",
"item",
":",
"str",
",",
"default",
":",
"JsonObjTypes",
"=",
"None",
")",
"->",
"JsonObjTypes",
":",
"return",
"obj",
".",
"_get",
"(",
"item",
",",
"default",
")"
] |
Dictionary get routine
|
[
"Dictionary",
"get",
"routine"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L167-L169
|
239,461
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
setdefault
|
def setdefault(obj: JsonObj, k: str, value: Union[Dict, JsonTypes]) -> JsonObjTypes:
""" Dictionary setdefault reoutine """
return obj._setdefault(k, value)
|
python
|
def setdefault(obj: JsonObj, k: str, value: Union[Dict, JsonTypes]) -> JsonObjTypes:
""" Dictionary setdefault reoutine """
return obj._setdefault(k, value)
|
[
"def",
"setdefault",
"(",
"obj",
":",
"JsonObj",
",",
"k",
":",
"str",
",",
"value",
":",
"Union",
"[",
"Dict",
",",
"JsonTypes",
"]",
")",
"->",
"JsonObjTypes",
":",
"return",
"obj",
".",
"_setdefault",
"(",
"k",
",",
"value",
")"
] |
Dictionary setdefault routine
|
[
"Dictionary",
"setdefault",
"reoutine"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L172-L174
|
239,462
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
JsonObj._default
|
def _default(self, obj):
""" return a serialized version of obj or raise a TypeError
:param obj:
:return: Serialized version of obj
"""
return obj.__dict__ if isinstance(obj, JsonObj) else json.JSONDecoder().decode(obj)
|
python
|
def _default(self, obj):
""" return a serialized version of obj or raise a TypeError
:param obj:
:return: Serialized version of obj
"""
return obj.__dict__ if isinstance(obj, JsonObj) else json.JSONDecoder().decode(obj)
|
[
"def",
"_default",
"(",
"self",
",",
"obj",
")",
":",
"return",
"obj",
".",
"__dict__",
"if",
"isinstance",
"(",
"obj",
",",
"JsonObj",
")",
"else",
"json",
".",
"JSONDecoder",
"(",
")",
".",
"decode",
"(",
"obj",
")"
] |
return a serialized version of obj or raise a TypeError
:param obj:
:return: Serialized version of obj
|
[
"return",
"a",
"serialized",
"version",
"of",
"obj",
"or",
"raise",
"a",
"TypeError"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L29-L35
|
239,463
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
JsonObj._as_json
|
def _as_json(self, **kwargs) -> str:
""" Convert a JsonObj into straight json text
:param kwargs: json.dumps arguments
:return: JSON formatted str
"""
return json.dumps(self, default=self._default, **kwargs)
|
python
|
def _as_json(self, **kwargs) -> str:
""" Convert a JsonObj into straight json text
:param kwargs: json.dumps arguments
:return: JSON formatted str
"""
return json.dumps(self, default=self._default, **kwargs)
|
[
"def",
"_as_json",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
"->",
"str",
":",
"return",
"json",
".",
"dumps",
"(",
"self",
",",
"default",
"=",
"self",
".",
"_default",
",",
"*",
"*",
"kwargs",
")"
] |
Convert a JsonObj into straight json text
:param kwargs: json.dumps arguments
:return: JSON formatted str
|
[
"Convert",
"a",
"JsonObj",
"into",
"straight",
"json",
"text"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L56-L62
|
239,464
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
JsonObj._as_json_dumps
|
def _as_json_dumps(self, indent: str=' ', **kwargs) -> str:
""" Convert to a stringified json object.
This is the same as _as_json with the exception that it isn't
a property, meaning that we can actually pass arguments...
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
"""
return json.dumps(self, default=self._default, indent=indent, **kwargs)
|
python
|
def _as_json_dumps(self, indent: str=' ', **kwargs) -> str:
""" Convert to a stringified json object.
This is the same as _as_json with the exception that it isn't
a property, meaning that we can actually pass arguments...
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
"""
return json.dumps(self, default=self._default, indent=indent, **kwargs)
|
[
"def",
"_as_json_dumps",
"(",
"self",
",",
"indent",
":",
"str",
"=",
"' '",
",",
"*",
"*",
"kwargs",
")",
"->",
"str",
":",
"return",
"json",
".",
"dumps",
"(",
"self",
",",
"default",
"=",
"self",
".",
"_default",
",",
"indent",
"=",
"indent",
",",
"*",
"*",
"kwargs",
")"
] |
Convert to a stringified json object.
This is the same as _as_json with the exception that it isn't
a property, meaning that we can actually pass arguments...
:param indent: indent argument to dumps
:param kwargs: other arguments for dumps
:return: JSON formatted string
|
[
"Convert",
"to",
"a",
"stringified",
"json",
"object",
"."
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L64-L73
|
239,465
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
JsonObj.__as_list
|
def __as_list(value: List[JsonObjTypes]) -> List[JsonTypes]:
""" Return a json array as a list
:param value: array
:return: array with JsonObj instances removed
"""
return [e._as_dict if isinstance(e, JsonObj) else e for e in value]
|
python
|
def __as_list(value: List[JsonObjTypes]) -> List[JsonTypes]:
""" Return a json array as a list
:param value: array
:return: array with JsonObj instances removed
"""
return [e._as_dict if isinstance(e, JsonObj) else e for e in value]
|
[
"def",
"__as_list",
"(",
"value",
":",
"List",
"[",
"JsonObjTypes",
"]",
")",
"->",
"List",
"[",
"JsonTypes",
"]",
":",
"return",
"[",
"e",
".",
"_as_dict",
"if",
"isinstance",
"(",
"e",
",",
"JsonObj",
")",
"else",
"e",
"for",
"e",
"in",
"value",
"]"
] |
Return a json array as a list
:param value: array
:return: array with JsonObj instances removed
|
[
"Return",
"a",
"json",
"array",
"as",
"a",
"list"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L76-L82
|
239,466
|
hsolbrig/jsonasobj
|
src/jsonasobj/jsonobj.py
|
JsonObj._as_dict
|
def _as_dict(self) -> Dict[str, JsonTypes]:
""" Convert a JsonObj into a straight dictionary
    :return: dictionary that corresponds to the json object
"""
return {k: v._as_dict if isinstance(v, JsonObj) else
self.__as_list(v) if isinstance(v, list) else
v for k, v in self.__dict__.items()}
|
python
|
def _as_dict(self) -> Dict[str, JsonTypes]:
""" Convert a JsonObj into a straight dictionary
    :return: dictionary that corresponds to the json object
"""
return {k: v._as_dict if isinstance(v, JsonObj) else
self.__as_list(v) if isinstance(v, list) else
v for k, v in self.__dict__.items()}
|
[
"def",
"_as_dict",
"(",
"self",
")",
"->",
"Dict",
"[",
"str",
",",
"JsonTypes",
"]",
":",
"return",
"{",
"k",
":",
"v",
".",
"_as_dict",
"if",
"isinstance",
"(",
"v",
",",
"JsonObj",
")",
"else",
"self",
".",
"__as_list",
"(",
"v",
")",
"if",
"isinstance",
"(",
"v",
",",
"list",
")",
"else",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"__dict__",
".",
"items",
"(",
")",
"}"
] |
Convert a JsonObj into a straight dictionary
:return: dictionary that corresponds to the json object
|
[
"Convert",
"a",
"JsonObj",
"into",
"a",
"straight",
"dictionary"
] |
13ac0838bb85d47bb6b8a667a4c2c69dba93e87d
|
https://github.com/hsolbrig/jsonasobj/blob/13ac0838bb85d47bb6b8a667a4c2c69dba93e87d/src/jsonasobj/jsonobj.py#L85-L92
|
239,467
|
FujiMakoto/AgentML
|
agentml/logger.py
|
InternalLogger.max_entries
|
def max_entries(self):
"""
Removes the maximum entry limit
"""
self._debug_log.info('Removing maximum entries restriction')
self._log_entries(deque(self._log_entries))
|
python
|
def max_entries(self):
"""
Removes the maximum entry limit
"""
self._debug_log.info('Removing maximum entries restriction')
self._log_entries(deque(self._log_entries))
|
[
"def",
"max_entries",
"(",
"self",
")",
":",
"self",
".",
"_debug_log",
".",
"info",
"(",
"'Removing maximum entries restriction'",
")",
"self",
".",
"_log_entries",
"(",
"deque",
"(",
"self",
".",
"_log_entries",
")",
")"
] |
Removes the maximum entry limit
|
[
"Removes",
"the",
"maximum",
"entry",
"limit"
] |
c8cb64b460d876666bf29ea2c682189874c7c403
|
https://github.com/FujiMakoto/AgentML/blob/c8cb64b460d876666bf29ea2c682189874c7c403/agentml/logger.py#L60-L65
|
239,468
|
avanwyk/cipy
|
examples/gc_pso.py
|
main
|
def main(dimension, iterations):
""" Main function to execute gbest GC PSO algorithm.
"""
objective_function = minimize(functions.sphere)
stopping_condition = max_iterations(iterations)
(solution, metrics) = optimize(objective_function=objective_function,
domain=Domain(-5.12, 5.12, dimension),
stopping_condition=stopping_condition,
parameters={'seed': 3758117674,
'rho': 1.0, 'e_s': 15, 'e_f': 5},
velocity_update=gc_velocity_update,
parameter_update=update_rho,
measurements=[fitness_measurement])
return solution
|
python
|
def main(dimension, iterations):
""" Main function to execute gbest GC PSO algorithm.
"""
objective_function = minimize(functions.sphere)
stopping_condition = max_iterations(iterations)
(solution, metrics) = optimize(objective_function=objective_function,
domain=Domain(-5.12, 5.12, dimension),
stopping_condition=stopping_condition,
parameters={'seed': 3758117674,
'rho': 1.0, 'e_s': 15, 'e_f': 5},
velocity_update=gc_velocity_update,
parameter_update=update_rho,
measurements=[fitness_measurement])
return solution
|
[
"def",
"main",
"(",
"dimension",
",",
"iterations",
")",
":",
"objective_function",
"=",
"minimize",
"(",
"functions",
".",
"sphere",
")",
"stopping_condition",
"=",
"max_iterations",
"(",
"iterations",
")",
"(",
"solution",
",",
"metrics",
")",
"=",
"optimize",
"(",
"objective_function",
"=",
"objective_function",
",",
"domain",
"=",
"Domain",
"(",
"-",
"5.12",
",",
"5.12",
",",
"dimension",
")",
",",
"stopping_condition",
"=",
"stopping_condition",
",",
"parameters",
"=",
"{",
"'seed'",
":",
"3758117674",
",",
"'rho'",
":",
"1.0",
",",
"'e_s'",
":",
"15",
",",
"'e_f'",
":",
"5",
"}",
",",
"velocity_update",
"=",
"gc_velocity_update",
",",
"parameter_update",
"=",
"update_rho",
",",
"measurements",
"=",
"[",
"fitness_measurement",
"]",
")",
"return",
"solution"
] |
Main function to execute gbest GC PSO algorithm.
|
[
"Main",
"function",
"to",
"execute",
"gbest",
"GC",
"PSO",
"algorithm",
"."
] |
98450dd01767b3615c113e50dc396f135e177b29
|
https://github.com/avanwyk/cipy/blob/98450dd01767b3615c113e50dc396f135e177b29/examples/gc_pso.py#L27-L40
|
239,469
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper.read_chunks
|
def read_chunks(self):
"""Read chunks from the HTTP client"""
if self.reading_chunks and self.got_chunk:
# we got on the fast-path and directly read from the buffer.
# if we continue to recurse, this is going to blow up the stack.
# so instead return
#
# NOTE: This actually is unnecessary as long as tornado guarantees that
# ioloop.add_callback always gets dispatched via the main io loop
# and they don't introduce a fast-path similar to read_XY
logger.debug("Fast-Path detected, returning...")
return
while not self.got_request:
self.reading_chunks = True
self.got_chunk = False
# chunk starts with length, so read it. This will then subsequently also read the chunk
self.httpstream.read_until("\r\n", self._chunk_length)
self.reading_chunks = False
if self.got_chunk:
# the previous read hit the fast path and read from the buffer
# instead of going through the main polling loop. This means we
# should iteratively issue the next request
logger.debug("Fast-Path detected, iterating...")
continue
else:
break
# if we arrive here, we read the complete request or
# the ioloop has scheduled another call to read_chunks
return
|
python
|
def read_chunks(self):
"""Read chunks from the HTTP client"""
if self.reading_chunks and self.got_chunk:
# we got on the fast-path and directly read from the buffer.
# if we continue to recurse, this is going to blow up the stack.
# so instead return
#
# NOTE: This actually is unnecessary as long as tornado guarantees that
# ioloop.add_callback always gets dispatched via the main io loop
# and they don't introduce a fast-path similar to read_XY
logger.debug("Fast-Path detected, returning...")
return
while not self.got_request:
self.reading_chunks = True
self.got_chunk = False
# chunk starts with length, so read it. This will then subsequently also read the chunk
self.httpstream.read_until("\r\n", self._chunk_length)
self.reading_chunks = False
if self.got_chunk:
# the previous read hit the fast path and read from the buffer
# instead of going through the main polling loop. This means we
# should iteratively issue the next request
logger.debug("Fast-Path detected, iterating...")
continue
else:
break
# if we arrive here, we read the complete request or
# the ioloop has scheduled another call to read_chunks
return
|
[
"def",
"read_chunks",
"(",
"self",
")",
":",
"if",
"self",
".",
"reading_chunks",
"and",
"self",
".",
"got_chunk",
":",
"# we got on the fast-path and directly read from the buffer.",
"# if we continue to recurse, this is going to blow up the stack.",
"# so instead return",
"#",
"# NOTE: This actually is unnecessary as long as tornado guarantees that",
"# ioloop.add_callback always gets dispatched via the main io loop",
"# and they don't introduce a fast-path similar to read_XY",
"logger",
".",
"debug",
"(",
"\"Fast-Path detected, returning...\"",
")",
"return",
"while",
"not",
"self",
".",
"got_request",
":",
"self",
".",
"reading_chunks",
"=",
"True",
"self",
".",
"got_chunk",
"=",
"False",
"# chunk starts with length, so read it. This will then subsequently also read the chunk",
"self",
".",
"httpstream",
".",
"read_until",
"(",
"\"\\r\\n\"",
",",
"self",
".",
"_chunk_length",
")",
"self",
".",
"reading_chunks",
"=",
"False",
"if",
"self",
".",
"got_chunk",
":",
"# the previous read hit the fast path and read from the buffer",
"# instead of going through the main polling loop. This means we ",
"# should iteratively issue the next request",
"logger",
".",
"debug",
"(",
"\"Fast-Path detected, iterating...\"",
")",
"continue",
"else",
":",
"break",
"# if we arrive here, we read the complete request or",
"# the ioloop has scheduled another call to read_chunks",
"return"
] |
Read chunks from the HTTP client
|
[
"Read",
"chunks",
"from",
"the",
"HTTP",
"client"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L139-L171
|
239,470
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._chunk_length
|
def _chunk_length(self, data):
"""Received the chunk length"""
assert data[-2:] == "\r\n", "CRLF"
        length = data[:-2].split(';')[0] # cut off optional length parameters
length = int(length.strip(), 16) # length is in hex
if length:
logger.debug('Got chunk length: %d', length)
self.httpstream.read_bytes(length + 2, self._chunk_data)
else:
logger.debug('Got last chunk (size 0)')
self.got_request = True
# enable input write event so the handler can finish things up
# when it has written all pending data
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
|
python
|
def _chunk_length(self, data):
"""Received the chunk length"""
assert data[-2:] == "\r\n", "CRLF"
        length = data[:-2].split(';')[0] # cut off optional length parameters
length = int(length.strip(), 16) # length is in hex
if length:
logger.debug('Got chunk length: %d', length)
self.httpstream.read_bytes(length + 2, self._chunk_data)
else:
logger.debug('Got last chunk (size 0)')
self.got_request = True
# enable input write event so the handler can finish things up
# when it has written all pending data
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
|
[
"def",
"_chunk_length",
"(",
"self",
",",
"data",
")",
":",
"assert",
"data",
"[",
"-",
"2",
":",
"]",
"==",
"\"\\r\\n\"",
",",
"\"CRLF\"",
"length",
"=",
"data",
"[",
":",
"-",
"2",
"]",
".",
"split",
"(",
"';'",
")",
"[",
"0",
"]",
"# cut off optional length paramters",
"length",
"=",
"int",
"(",
"length",
".",
"strip",
"(",
")",
",",
"16",
")",
"# length is in hex",
"if",
"length",
":",
"logger",
".",
"debug",
"(",
"'Got chunk length: %d'",
",",
"length",
")",
"self",
".",
"httpstream",
".",
"read_bytes",
"(",
"length",
"+",
"2",
",",
"self",
".",
"_chunk_data",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Got last chunk (size 0)'",
")",
"self",
".",
"got_request",
"=",
"True",
"# enable input write event so the handler can finish things up ",
"# when it has written all pending data",
"self",
".",
"ioloop",
".",
"update_handler",
"(",
"self",
".",
"fd_stdin",
",",
"self",
".",
"ioloop",
".",
"WRITE",
"|",
"self",
".",
"ioloop",
".",
"ERROR",
")"
] |
Received the chunk length
|
[
"Received",
"the",
"chunk",
"length"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L173-L189
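A stdlib-only sketch of the chunk-size parsing done in _chunk_length() above: the size line read by read_until("\r\n", ...) is hexadecimal, may carry ;-separated chunk extensions, and a size of zero marks the last chunk.

def parse_chunk_size(line):
    # line arrives as e.g. "1a3;ext=1\r\n"
    assert line[-2:] == "\r\n", "CRLF"
    size_field = line[:-2].split(';')[0]  # drop optional chunk extensions
    return int(size_field.strip(), 16)    # the size is hexadecimal

assert parse_chunk_size("1a3;ext=1\r\n") == 0x1a3
assert parse_chunk_size("0\r\n") == 0     # last chunk, end of request body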
|
239,471
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._chunk_data
|
def _chunk_data(self, data):
"""Received chunk data"""
assert data[-2:] == "\r\n", "CRLF"
if self.gzip_decompressor:
if not self.gzip_header_seen:
assert data[:2] == '\x1f\x8b', "gzip header"
self.gzip_header_seen = True
self.process_input_buffer += self.gzip_decompressor.decompress(data[:-2])
else:
self.process_input_buffer += data[:-2]
self.got_chunk = True
if self.process_input_buffer:
# since we now have data in the buffer, enable write events again
logger.debug('Got data in buffer, interested in writing to process again')
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
# do NOT call read_chunks directly. This is to give git a chance to consume input.
# we don't want to grow the buffer unnecessarily.
# Additionally, this should mitigate the stack explosion mentioned in read_chunks
self.ioloop.add_callback(self.read_chunks)
|
python
|
def _chunk_data(self, data):
"""Received chunk data"""
assert data[-2:] == "\r\n", "CRLF"
if self.gzip_decompressor:
if not self.gzip_header_seen:
assert data[:2] == '\x1f\x8b', "gzip header"
self.gzip_header_seen = True
self.process_input_buffer += self.gzip_decompressor.decompress(data[:-2])
else:
self.process_input_buffer += data[:-2]
self.got_chunk = True
if self.process_input_buffer:
# since we now have data in the buffer, enable write events again
logger.debug('Got data in buffer, interested in writing to process again')
self.ioloop.update_handler(self.fd_stdin, self.ioloop.WRITE | self.ioloop.ERROR)
# do NOT call read_chunks directly. This is to give git a chance to consume input.
# we don't want to grow the buffer unnecessarily.
# Additionally, this should mitigate the stack explosion mentioned in read_chunks
self.ioloop.add_callback(self.read_chunks)
|
[
"def",
"_chunk_data",
"(",
"self",
",",
"data",
")",
":",
"assert",
"data",
"[",
"-",
"2",
":",
"]",
"==",
"\"\\r\\n\"",
",",
"\"CRLF\"",
"if",
"self",
".",
"gzip_decompressor",
":",
"if",
"not",
"self",
".",
"gzip_header_seen",
":",
"assert",
"data",
"[",
":",
"2",
"]",
"==",
"'\\x1f\\x8b'",
",",
"\"gzip header\"",
"self",
".",
"gzip_header_seen",
"=",
"True",
"self",
".",
"process_input_buffer",
"+=",
"self",
".",
"gzip_decompressor",
".",
"decompress",
"(",
"data",
"[",
":",
"-",
"2",
"]",
")",
"else",
":",
"self",
".",
"process_input_buffer",
"+=",
"data",
"[",
":",
"-",
"2",
"]",
"self",
".",
"got_chunk",
"=",
"True",
"if",
"self",
".",
"process_input_buffer",
":",
"# since we now have data in the buffer, enable write events again",
"logger",
".",
"debug",
"(",
"'Got data in buffer, interested in writing to process again'",
")",
"self",
".",
"ioloop",
".",
"update_handler",
"(",
"self",
".",
"fd_stdin",
",",
"self",
".",
"ioloop",
".",
"WRITE",
"|",
"self",
".",
"ioloop",
".",
"ERROR",
")",
"# do NOT call read_chunks directly. This is to give git a chance to consume input.",
"# we don't want to grow the buffer unnecessarily.",
"# Additionally, this should mitigate the stack explosion mentioned in read_chunks",
"self",
".",
"ioloop",
".",
"add_callback",
"(",
"self",
".",
"read_chunks",
")"
] |
Received chunk data
|
[
"Received",
"chunk",
"data"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L191-L215
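A sketch of streaming gzip decompression analogous to the gzip_decompressor used in _chunk_data() above; zlib.decompressobj(16 + zlib.MAX_WBITS) is one common way to build such an incremental gzip decoder, but whether ProcessWrapper constructs it exactly this way is not shown in this record.

import zlib

# build a small gzip stream to stand in for the chunked request body
compressor = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
stream = compressor.compress(b"0032want ...") + compressor.flush()
assert stream[:2] == b"\x1f\x8b"          # same magic-byte check as _chunk_data

# decompress it incrementally, as the wrapper does one chunk at a time
decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
body = b""
for i in range(0, len(stream), 4):        # feed a few bytes per "chunk"
    body += decompressor.decompress(stream[i:i + 4])
body += decompressor.flush()
print(body)                               # b'0032want ...'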
|
239,472
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._handle_stdin_event
|
def _handle_stdin_event(self, fd, events):
"""Eventhandler for stdin"""
assert fd == self.fd_stdin
if events & self.ioloop.ERROR:
# An error at the end is expected since tornado maps HUP to ERROR
logger.debug('Error on stdin')
# ensure pipe is closed
if not self.process.stdin.closed:
self.process.stdin.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdin)
# if all fds are closed, we can finish
return self._graceful_finish()
# got data ready
logger.debug('stdin ready for write')
if self.process_input_buffer:
count = os.write(fd, self.process_input_buffer)
logger.debug('Wrote first %d bytes of %d total', count, len(self.process_input_buffer))
self.process_input_buffer = self.process_input_buffer[count:]
if not self.process_input_buffer:
# consumed everything in the buffer
if self.got_request:
# we got the request and wrote everything to the process
# this means we can close stdin and stop handling events
# for it
logger.debug('Got complete request, closing stdin')
self.process.stdin.close()
self.ioloop.remove_handler(fd)
else:
# There is more data bound to come from the client
# so just disable write events for the moment until
# we got more to write
logger.debug('Not interested in write events on stdin anymore')
self.ioloop.update_handler(fd, self.ioloop.ERROR)
|
python
|
def _handle_stdin_event(self, fd, events):
"""Eventhandler for stdin"""
assert fd == self.fd_stdin
if events & self.ioloop.ERROR:
# An error at the end is expected since tornado maps HUP to ERROR
logger.debug('Error on stdin')
# ensure pipe is closed
if not self.process.stdin.closed:
self.process.stdin.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdin)
# if all fds are closed, we can finish
return self._graceful_finish()
# got data ready
logger.debug('stdin ready for write')
if self.process_input_buffer:
count = os.write(fd, self.process_input_buffer)
logger.debug('Wrote first %d bytes of %d total', count, len(self.process_input_buffer))
self.process_input_buffer = self.process_input_buffer[count:]
if not self.process_input_buffer:
# consumed everything in the buffer
if self.got_request:
# we got the request and wrote everything to the process
# this means we can close stdin and stop handling events
# for it
logger.debug('Got complete request, closing stdin')
self.process.stdin.close()
self.ioloop.remove_handler(fd)
else:
# There is more data bound to come from the client
# so just disable write events for the moment until
# we got more to write
logger.debug('Not interested in write events on stdin anymore')
self.ioloop.update_handler(fd, self.ioloop.ERROR)
|
[
"def",
"_handle_stdin_event",
"(",
"self",
",",
"fd",
",",
"events",
")",
":",
"assert",
"fd",
"==",
"self",
".",
"fd_stdin",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"ERROR",
":",
"# An error at the end is expected since tornado maps HUP to ERROR",
"logger",
".",
"debug",
"(",
"'Error on stdin'",
")",
"# ensure pipe is closed",
"if",
"not",
"self",
".",
"process",
".",
"stdin",
".",
"closed",
":",
"self",
".",
"process",
".",
"stdin",
".",
"close",
"(",
")",
"# remove handler",
"self",
".",
"ioloop",
".",
"remove_handler",
"(",
"self",
".",
"fd_stdin",
")",
"# if all fds are closed, we can finish",
"return",
"self",
".",
"_graceful_finish",
"(",
")",
"# got data ready",
"logger",
".",
"debug",
"(",
"'stdin ready for write'",
")",
"if",
"self",
".",
"process_input_buffer",
":",
"count",
"=",
"os",
".",
"write",
"(",
"fd",
",",
"self",
".",
"process_input_buffer",
")",
"logger",
".",
"debug",
"(",
"'Wrote first %d bytes of %d total'",
",",
"count",
",",
"len",
"(",
"self",
".",
"process_input_buffer",
")",
")",
"self",
".",
"process_input_buffer",
"=",
"self",
".",
"process_input_buffer",
"[",
"count",
":",
"]",
"if",
"not",
"self",
".",
"process_input_buffer",
":",
"# consumed everything in the buffer",
"if",
"self",
".",
"got_request",
":",
"# we got the request and wrote everything to the process",
"# this means we can close stdin and stop handling events",
"# for it",
"logger",
".",
"debug",
"(",
"'Got complete request, closing stdin'",
")",
"self",
".",
"process",
".",
"stdin",
".",
"close",
"(",
")",
"self",
".",
"ioloop",
".",
"remove_handler",
"(",
"fd",
")",
"else",
":",
"# There is more data bound to come from the client",
"# so just disable write events for the moment until ",
"# we got more to write",
"logger",
".",
"debug",
"(",
"'Not interested in write events on stdin anymore'",
")",
"self",
".",
"ioloop",
".",
"update_handler",
"(",
"fd",
",",
"self",
".",
"ioloop",
".",
"ERROR",
")"
] |
Eventhandler for stdin
|
[
"Eventhandler",
"for",
"stdin"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L217-L254
|
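The `_handle_stdin_event` entry above relies on `os.write` possibly accepting fewer bytes than offered, which is why the handler keeps the unwritten tail in `process_input_buffer` and retries on the next writable event. A minimal standalone sketch of that buffer-slicing pattern, with no Tornado involved (the pipe and payload here are purely illustrative):

import os

def drain_to_fd(fd, buf):
    """Write as much of buf as the kernel accepts; return the unwritten tail."""
    while buf:
        try:
            count = os.write(fd, buf)   # may write fewer bytes than len(buf)
        except BlockingIOError:
            break                       # fd not writable right now; keep the tail
        buf = buf[count:]               # drop only what was actually written
    return buf

r, w = os.pipe()
os.set_blocking(w, False)
pending = drain_to_fd(w, b"x" * 100000)   # larger than a typical pipe buffer
print(len(pending), "bytes still pending")
os.close(r)
os.close(w)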
239,473
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._handle_stdout_event
|
def _handle_stdout_event(self, fd, events):
"""Eventhandler for stdout"""
assert fd == self.fd_stdout
if events & self.ioloop.READ:
# got data ready to read
data = ''
# Now basically we have two cases: either the client supports
# HTTP/1.1 in which case we can stream the answer in chunked mode
# in HTTP/1.0 we need to send a content-length and thus buffer the complete output
if self.request.supports_http_1_1():
if not self.headers_sent:
self.sent_chunks = True
self.headers.update({'Date': get_date_header(), 'Transfer-Encoding': 'chunked'})
data = 'HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
if self.output_prelude:
data += hex(len(self.output_prelude))[2:] + "\r\n" # cut off 0x
data += self.output_prelude + "\r\n"
self.headers_sent = True
payload = os.read(fd, 8192)
if events & self.ioloop.ERROR: # there might be data remaining in the buffer if we got HUP, get it all
remainder = True
while remainder != '': # until EOF
remainder = os.read(fd, 8192)
payload += remainder
data += hex(len(payload))[2:] + "\r\n" # cut off 0x
data += payload + "\r\n"
else:
if not self.headers_sent:
# Use the over-eager blocking read that will get everything until we hit EOF
# this might actually be somewhat dangerous as noted in the subprocess documentation
# and lead to a deadlock. This is only a legacy mode for HTTP/1.0 clients anyway,
# so we might want to remove it entirely anyways
payload = self.process.stdout.read()
self.headers.update({'Date': get_date_header(), 'Content-Length': str(len(payload))})
data = 'HTTP/1.0 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
self.headers_sent = True
data += self.output_prelude + payload
else:
# this is actually somewhat illegal as it messes with content-length but
# it shouldn't happen anyways, as the read above should have read anything
# python docs say this can happen on ttys...
logger.error("This should not happen")
data = self.process.stdout.read()
if len(data) == 8200:
self.number_of_8k_chunks_sent += 1
else:
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8192 bytes', self.number_of_8k_chunks_sent)
self.number_of_8k_chunks_sent = 0
logger.debug('Sending stdout to client %d bytes: %r', len(data), data[:20])
self.request.write(data)
# now we can also have an error. This is because tornado maps HUP onto error
# therefore, no elif here!
if events & self.ioloop.ERROR:
logger.debug('Error on stdout')
# ensure file is closed
if not self.process.stdout.closed:
self.process.stdout.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdout)
# if all fds are closed, we can finish
return self._graceful_finish()
|
python
|
def _handle_stdout_event(self, fd, events):
"""Eventhandler for stdout"""
assert fd == self.fd_stdout
if events & self.ioloop.READ:
# got data ready to read
data = ''
# Now basically we have two cases: either the client supports
# HTTP/1.1 in which case we can stream the answer in chunked mode
# in HTTP/1.0 we need to send a content-length and thus buffer the complete output
if self.request.supports_http_1_1():
if not self.headers_sent:
self.sent_chunks = True
self.headers.update({'Date': get_date_header(), 'Transfer-Encoding': 'chunked'})
data = 'HTTP/1.1 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
if self.output_prelude:
data += hex(len(self.output_prelude))[2:] + "\r\n" # cut off 0x
data += self.output_prelude + "\r\n"
self.headers_sent = True
payload = os.read(fd, 8192)
if events & self.ioloop.ERROR: # there might be data remaining in the buffer if we got HUP, get it all
remainder = True
while remainder != '': # until EOF
remainder = os.read(fd, 8192)
payload += remainder
data += hex(len(payload))[2:] + "\r\n" # cut off 0x
data += payload + "\r\n"
else:
if not self.headers_sent:
# Use the over-eager blocking read that will get everything until we hit EOF
# this might actually be somewhat dangerous as noted in the subprocess documentation
# and lead to a deadlock. This is only a legacy mode for HTTP/1.0 clients anyway,
# so we might want to remove it entirely anyways
payload = self.process.stdout.read()
self.headers.update({'Date': get_date_header(), 'Content-Length': str(len(payload))})
data = 'HTTP/1.0 200 OK\r\n' + '\r\n'.join([ k + ': ' + v for k, v in self.headers.items()]) + '\r\n\r\n'
self.headers_sent = True
data += self.output_prelude + payload
else:
# this is actually somewhat illegal as it messes with content-length but
# it shouldn't happen anyways, as the read above should have read anything
# python docs say this can happen on ttys...
logger.error("This should not happen")
data = self.process.stdout.read()
if len(data) == 8200:
self.number_of_8k_chunks_sent += 1
else:
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8192 bytes', self.number_of_8k_chunks_sent)
self.number_of_8k_chunks_sent = 0
logger.debug('Sending stdout to client %d bytes: %r', len(data), data[:20])
self.request.write(data)
# now we can also have an error. This is because tornado maps HUP onto error
# therefore, no elif here!
if events & self.ioloop.ERROR:
logger.debug('Error on stdout')
# ensure file is closed
if not self.process.stdout.closed:
self.process.stdout.close()
# remove handler
self.ioloop.remove_handler(self.fd_stdout)
# if all fds are closed, we can finish
return self._graceful_finish()
|
[
"def",
"_handle_stdout_event",
"(",
"self",
",",
"fd",
",",
"events",
")",
":",
"assert",
"fd",
"==",
"self",
".",
"fd_stdout",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"READ",
":",
"# got data ready to read",
"data",
"=",
"''",
"# Now basically we have two cases: either the client supports",
"# HTTP/1.1 in which case we can stream the answer in chunked mode",
"# in HTTP/1.0 we need to send a content-length and thus buffer the complete output",
"if",
"self",
".",
"request",
".",
"supports_http_1_1",
"(",
")",
":",
"if",
"not",
"self",
".",
"headers_sent",
":",
"self",
".",
"sent_chunks",
"=",
"True",
"self",
".",
"headers",
".",
"update",
"(",
"{",
"'Date'",
":",
"get_date_header",
"(",
")",
",",
"'Transfer-Encoding'",
":",
"'chunked'",
"}",
")",
"data",
"=",
"'HTTP/1.1 200 OK\\r\\n'",
"+",
"'\\r\\n'",
".",
"join",
"(",
"[",
"k",
"+",
"': '",
"+",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"headers",
".",
"items",
"(",
")",
"]",
")",
"+",
"'\\r\\n\\r\\n'",
"if",
"self",
".",
"output_prelude",
":",
"data",
"+=",
"hex",
"(",
"len",
"(",
"self",
".",
"output_prelude",
")",
")",
"[",
"2",
":",
"]",
"+",
"\"\\r\\n\"",
"# cut off 0x",
"data",
"+=",
"self",
".",
"output_prelude",
"+",
"\"\\r\\n\"",
"self",
".",
"headers_sent",
"=",
"True",
"payload",
"=",
"os",
".",
"read",
"(",
"fd",
",",
"8192",
")",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"ERROR",
":",
"# there might be data remaining in the buffer if we got HUP, get it all",
"remainder",
"=",
"True",
"while",
"remainder",
"!=",
"''",
":",
"# until EOF",
"remainder",
"=",
"os",
".",
"read",
"(",
"fd",
",",
"8192",
")",
"payload",
"+=",
"remainder",
"data",
"+=",
"hex",
"(",
"len",
"(",
"payload",
")",
")",
"[",
"2",
":",
"]",
"+",
"\"\\r\\n\"",
"# cut off 0x",
"data",
"+=",
"payload",
"+",
"\"\\r\\n\"",
"else",
":",
"if",
"not",
"self",
".",
"headers_sent",
":",
"# Use the over-eager blocking read that will get everything until we hit EOF",
"# this might actually be somewhat dangerous as noted in the subprocess documentation",
"# and lead to a deadlock. This is only a legacy mode for HTTP/1.0 clients anyway,",
"# so we might want to remove it entirely anyways",
"payload",
"=",
"self",
".",
"process",
".",
"stdout",
".",
"read",
"(",
")",
"self",
".",
"headers",
".",
"update",
"(",
"{",
"'Date'",
":",
"get_date_header",
"(",
")",
",",
"'Content-Length'",
":",
"str",
"(",
"len",
"(",
"payload",
")",
")",
"}",
")",
"data",
"=",
"'HTTP/1.0 200 OK\\r\\n'",
"+",
"'\\r\\n'",
".",
"join",
"(",
"[",
"k",
"+",
"': '",
"+",
"v",
"for",
"k",
",",
"v",
"in",
"self",
".",
"headers",
".",
"items",
"(",
")",
"]",
")",
"+",
"'\\r\\n\\r\\n'",
"self",
".",
"headers_sent",
"=",
"True",
"data",
"+=",
"self",
".",
"output_prelude",
"+",
"payload",
"else",
":",
"# this is actually somewhat illegal as it messes with content-length but ",
"# it shouldn't happen anyways, as the read above should have read anything",
"# python docs say this can happen on ttys...",
"logger",
".",
"error",
"(",
"\"This should not happen\"",
")",
"data",
"=",
"self",
".",
"process",
".",
"stdout",
".",
"read",
"(",
")",
"if",
"len",
"(",
"data",
")",
"==",
"8200",
":",
"self",
".",
"number_of_8k_chunks_sent",
"+=",
"1",
"else",
":",
"if",
"self",
".",
"number_of_8k_chunks_sent",
">",
"0",
":",
"logger",
".",
"debug",
"(",
"'Sent %d * 8192 bytes'",
",",
"self",
".",
"number_of_8k_chunks_sent",
")",
"self",
".",
"number_of_8k_chunks_sent",
"=",
"0",
"logger",
".",
"debug",
"(",
"'Sending stdout to client %d bytes: %r'",
",",
"len",
"(",
"data",
")",
",",
"data",
"[",
":",
"20",
"]",
")",
"self",
".",
"request",
".",
"write",
"(",
"data",
")",
"# now we can also have an error. This is because tornado maps HUP onto error",
"# therefore, no elif here!",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"ERROR",
":",
"logger",
".",
"debug",
"(",
"'Error on stdout'",
")",
"# ensure file is closed",
"if",
"not",
"self",
".",
"process",
".",
"stdout",
".",
"closed",
":",
"self",
".",
"process",
".",
"stdout",
".",
"close",
"(",
")",
"# remove handler",
"self",
".",
"ioloop",
".",
"remove_handler",
"(",
"self",
".",
"fd_stdout",
")",
"# if all fds are closed, we can finish",
"return",
"self",
".",
"_graceful_finish",
"(",
")"
] |
Eventhandler for stdout
|
[
"Eventhandler",
"for",
"stdout"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L256-L328
|
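`_handle_stdout_event` frames each read from the subprocess as an HTTP/1.1 chunk: the payload length in hex, CRLF, the payload, CRLF, with `_graceful_finish` later terminating the stream with a zero-length chunk. A small sketch of that framing, separate from gittornado (the helper names are illustrative):

def frame_chunk(payload):
    """Encode one HTTP/1.1 chunk: hex length, CRLF, payload, CRLF."""
    return hex(len(payload))[2:].encode("ascii") + b"\r\n" + payload + b"\r\n"

def frame_body(chunks):
    """Frame an iterable of byte strings and terminate with the zero chunk."""
    wire = b"".join(frame_chunk(c) for c in chunks if c)
    return wire + b"0\r\n\r\n"   # final chunk: size 0, no trailers, blank line

print(frame_body([b"hello ", b"world"]))
# b'6\r\nhello \r\n5\r\nworld\r\n0\r\n\r\n'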
239,474
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._handle_stderr_event
|
def _handle_stderr_event(self, fd, events):
"""Eventhandler for stderr"""
assert fd == self.fd_stderr
if events & self.ioloop.READ:
# got data ready
if not self.headers_sent:
payload = self.process.stderr.read()
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
else:
# see stdout
logger.error("This should not happen (stderr)")
data = self.process.stderr.read()
logger.debug('Sending stderr to client: %r', data)
self.request.write(data)
if events & self.ioloop.ERROR:
logger.debug('Error on stderr')
# ensure file is closed
if not self.process.stderr.closed:
self.process.stderr.close()
# remove handler
self.ioloop.remove_handler(self.fd_stderr)
# if all fds are closed, we can finish
return self._graceful_finish()
|
python
|
def _handle_stderr_event(self, fd, events):
"""Eventhandler for stderr"""
assert fd == self.fd_stderr
if events & self.ioloop.READ:
# got data ready
if not self.headers_sent:
payload = self.process.stderr.read()
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
else:
# see stdout
logger.error("This should not happen (stderr)")
data = self.process.stderr.read()
logger.debug('Sending stderr to client: %r', data)
self.request.write(data)
if events & self.ioloop.ERROR:
logger.debug('Error on stderr')
# ensure file is closed
if not self.process.stderr.closed:
self.process.stderr.close()
# remove handler
self.ioloop.remove_handler(self.fd_stderr)
# if all fds are closed, we can finish
return self._graceful_finish()
|
[
"def",
"_handle_stderr_event",
"(",
"self",
",",
"fd",
",",
"events",
")",
":",
"assert",
"fd",
"==",
"self",
".",
"fd_stderr",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"READ",
":",
"# got data ready",
"if",
"not",
"self",
".",
"headers_sent",
":",
"payload",
"=",
"self",
".",
"process",
".",
"stderr",
".",
"read",
"(",
")",
"data",
"=",
"'HTTP/1.1 500 Internal Server Error\\r\\nDate: %s\\r\\nContent-Length: %d\\r\\n\\r\\n'",
"%",
"(",
"get_date_header",
"(",
")",
",",
"len",
"(",
"payload",
")",
")",
"self",
".",
"headers_sent",
"=",
"True",
"data",
"+=",
"payload",
"else",
":",
"# see stdout",
"logger",
".",
"error",
"(",
"\"This should not happen (stderr)\"",
")",
"data",
"=",
"self",
".",
"process",
".",
"stderr",
".",
"read",
"(",
")",
"logger",
".",
"debug",
"(",
"'Sending stderr to client: %r'",
",",
"data",
")",
"self",
".",
"request",
".",
"write",
"(",
"data",
")",
"if",
"events",
"&",
"self",
".",
"ioloop",
".",
"ERROR",
":",
"logger",
".",
"debug",
"(",
"'Error on stderr'",
")",
"# ensure file is closed",
"if",
"not",
"self",
".",
"process",
".",
"stderr",
".",
"closed",
":",
"self",
".",
"process",
".",
"stderr",
".",
"close",
"(",
")",
"# remove handler",
"self",
".",
"ioloop",
".",
"remove_handler",
"(",
"self",
".",
"fd_stderr",
")",
"# if all fds are closed, we can finish",
"return",
"self",
".",
"_graceful_finish",
"(",
")"
] |
Eventhandler for stderr
|
[
"Eventhandler",
"for",
"stderr"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L330-L359
|
239,475
|
mensi/gittornado
|
gittornado/iowrapper.py
|
ProcessWrapper._graceful_finish
|
def _graceful_finish(self):
"""Detect if process has closed pipes and we can finish"""
if not self.process.stdout.closed or not self.process.stderr.closed:
return # stdout/stderr still open
if not self.process.stdin.closed:
self.process.stdin.close()
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8k chunks', self.number_of_8k_chunks_sent)
logger.debug("Finishing up. Process poll: %r", self.process.poll())
if not self.headers_sent:
retval = self.process.poll()
if retval != 0:
logger.warning("Empty response. Git return value: " + str(retval))
payload = "Did not produce any data. Errorcode: " + str(retval)
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
self.request.write(data)
else:
data = 'HTTP/1.1 200 Ok\r\nDate: %s\r\nContent-Length: 0\r\n\r\n' % get_date_header()
self.headers_sent = True
self.request.write(data)
# if we are in chunked mode, send end chunk with length 0
elif self.sent_chunks:
logger.debug("End chunk")
self.request.write("0\r\n")
# we could now send some more headers or trailers
self.request.write("\r\n")
self.request.finish()
|
python
|
def _graceful_finish(self):
"""Detect if process has closed pipes and we can finish"""
if not self.process.stdout.closed or not self.process.stderr.closed:
return # stdout/stderr still open
if not self.process.stdin.closed:
self.process.stdin.close()
if self.number_of_8k_chunks_sent > 0:
logger.debug('Sent %d * 8k chunks', self.number_of_8k_chunks_sent)
logger.debug("Finishing up. Process poll: %r", self.process.poll())
if not self.headers_sent:
retval = self.process.poll()
if retval != 0:
logger.warning("Empty response. Git return value: " + str(retval))
payload = "Did not produce any data. Errorcode: " + str(retval)
data = 'HTTP/1.1 500 Internal Server Error\r\nDate: %s\r\nContent-Length: %d\r\n\r\n' % (get_date_header(), len(payload))
self.headers_sent = True
data += payload
self.request.write(data)
else:
data = 'HTTP/1.1 200 Ok\r\nDate: %s\r\nContent-Length: 0\r\n\r\n' % get_date_header()
self.headers_sent = True
self.request.write(data)
# if we are in chunked mode, send end chunk with length 0
elif self.sent_chunks:
logger.debug("End chunk")
self.request.write("0\r\n")
# we could now send some more headers or trailers
self.request.write("\r\n")
self.request.finish()
|
[
"def",
"_graceful_finish",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"process",
".",
"stdout",
".",
"closed",
"or",
"not",
"self",
".",
"process",
".",
"stderr",
".",
"closed",
":",
"return",
"# stdout/stderr still open",
"if",
"not",
"self",
".",
"process",
".",
"stdin",
".",
"closed",
":",
"self",
".",
"process",
".",
"stdin",
".",
"close",
"(",
")",
"if",
"self",
".",
"number_of_8k_chunks_sent",
">",
"0",
":",
"logger",
".",
"debug",
"(",
"'Sent %d * 8k chunks'",
",",
"self",
".",
"number_of_8k_chunks_sent",
")",
"logger",
".",
"debug",
"(",
"\"Finishing up. Process poll: %r\"",
",",
"self",
".",
"process",
".",
"poll",
"(",
")",
")",
"if",
"not",
"self",
".",
"headers_sent",
":",
"retval",
"=",
"self",
".",
"process",
".",
"poll",
"(",
")",
"if",
"retval",
"!=",
"0",
":",
"logger",
".",
"warning",
"(",
"\"Empty response. Git return value: \"",
"+",
"str",
"(",
"retval",
")",
")",
"payload",
"=",
"\"Did not produce any data. Errorcode: \"",
"+",
"str",
"(",
"retval",
")",
"data",
"=",
"'HTTP/1.1 500 Internal Server Error\\r\\nDate: %s\\r\\nContent-Length: %d\\r\\n\\r\\n'",
"%",
"(",
"get_date_header",
"(",
")",
",",
"len",
"(",
"payload",
")",
")",
"self",
".",
"headers_sent",
"=",
"True",
"data",
"+=",
"payload",
"self",
".",
"request",
".",
"write",
"(",
"data",
")",
"else",
":",
"data",
"=",
"'HTTP/1.1 200 Ok\\r\\nDate: %s\\r\\nContent-Length: 0\\r\\n\\r\\n'",
"%",
"get_date_header",
"(",
")",
"self",
".",
"headers_sent",
"=",
"True",
"self",
".",
"request",
".",
"write",
"(",
"data",
")",
"# if we are in chunked mode, send end chunk with length 0",
"elif",
"self",
".",
"sent_chunks",
":",
"logger",
".",
"debug",
"(",
"\"End chunk\"",
")",
"self",
".",
"request",
".",
"write",
"(",
"\"0\\r\\n\"",
")",
"#we could now send some more headers resp. trailers",
"self",
".",
"request",
".",
"write",
"(",
"\"\\r\\n\"",
")",
"self",
".",
"request",
".",
"finish",
"(",
")"
] |
Detect if process has closed pipes and we can finish
|
[
"Detect",
"if",
"process",
"has",
"closed",
"pipes",
"and",
"we",
"can",
"finish"
] |
adf86b5537064337c806cce0e71eacaabc8bb610
|
https://github.com/mensi/gittornado/blob/adf86b5537064337c806cce0e71eacaabc8bb610/gittornado/iowrapper.py#L361-L396
|
239,476
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._genTimeoutList
|
def _genTimeoutList(self, const):
"""
Generates a dict of the valid timeout values for the given `const`.
The `const` value may change for different versions or types of
Pololu boards.
"""
result = {}
for v in range(128):
x = v & 0x0F
y = (v >> 4) & 0x07
if not y or (y and x > 7):
result[const * x * 2**y] = v
self._log and self._log.debug("Timeout list: %s", result)
return result
|
python
|
def _genTimeoutList(self, const):
"""
Generates a dict of the valid timeout values for the given `const`.
The `const` value may change for different versions or types of
Pololu boards.
"""
result = {}
for v in range(128):
x = v & 0x0F
y = (v >> 4) & 0x07
if not y or (y and x > 7):
result[const * x * 2**y] = v
self._log and self._log.debug("Timeout list: %s", result)
return result
|
[
"def",
"_genTimeoutList",
"(",
"self",
",",
"const",
")",
":",
"result",
"=",
"{",
"}",
"for",
"v",
"in",
"range",
"(",
"128",
")",
":",
"x",
"=",
"v",
"&",
"0x0F",
"y",
"=",
"(",
"v",
">>",
"4",
")",
"&",
"0x07",
"if",
"not",
"y",
"or",
"(",
"y",
"and",
"x",
">",
"7",
")",
":",
"result",
"[",
"const",
"*",
"x",
"*",
"2",
"**",
"y",
"]",
"=",
"v",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"debug",
"(",
"\"Timeout list: %s\"",
",",
"result",
")",
"return",
"result"
] |
Generates a dict of the valid timeout values for the given `const`.
The `const` value may change for different versions or types of
Pololu boards.
|
[
"Generates",
"a",
"dict",
"of",
"the",
"valid",
"timeout",
"values",
"for",
"the",
"given",
"const",
".",
"The",
"const",
"value",
"may",
"change",
"for",
"different",
"versions",
"or",
"types",
"of",
"Pololu",
"boards",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L55-L71
|
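`_genTimeoutList` packs a 4-bit multiplier x (low nibble) and a 3-bit exponent y (bits 4-6) into a single byte, so a stored byte encodes a timeout of const * x * 2**y; the guard `not y or (y and x > 7)` keeps exactly one canonical byte per reachable value. A quick check of the "72 unique values" remark made in `_getSerialTimeout` further down (the base constant of 1.0 is an arbitrary placeholder, since the real unit depends on the board):

def timeout_seconds(const, byte):
    """Decode a timeout byte: low nibble is x, bits 4-6 are y."""
    x = byte & 0x0F
    y = (byte >> 4) & 0x07
    return const * x * 2 ** y

values = {timeout_seconds(1.0, b) for b in range(128)}
print(len(values))   # 72 -- the other 56 encodings alias values already covered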
239,477
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik.findConnectedDevices
|
def findConnectedDevices(self):
"""
Find all the devices on the serial bus and store the results in a
class member object.
"""
tmpTimeout = self._serial.timeout
self._serial.timeout = 0.01
for dev in range(128):
device = self._getDeviceID(dev)
if device is not None and int(device) not in self._deviceConfig:
config = self._deviceConfig.setdefault(int(device), {})
self._deviceCallback(device, config)
self._log and self._log.info(
"Found device '%s' with configuration: %s", device, config)
self._serial.timeout = tmpTimeout
|
python
|
def findConnectedDevices(self):
"""
Find all the devices on the serial bus and store the results in a
class member object.
"""
tmpTimeout = self._serial.timeout
self._serial.timeout = 0.01
for dev in range(128):
device = self._getDeviceID(dev)
if device is not None and int(device) not in self._deviceConfig:
config = self._deviceConfig.setdefault(int(device), {})
self._deviceCallback(device, config)
self._log and self._log.info(
"Found device '%s' with configuration: %s", device, config)
self._serial.timeout = tmpTimeout
|
[
"def",
"findConnectedDevices",
"(",
"self",
")",
":",
"tmpTimeout",
"=",
"self",
".",
"_serial",
".",
"timeout",
"self",
".",
"_serial",
".",
"timeout",
"=",
"0.01",
"for",
"dev",
"in",
"range",
"(",
"128",
")",
":",
"device",
"=",
"self",
".",
"_getDeviceID",
"(",
"dev",
")",
"if",
"device",
"is",
"not",
"None",
"and",
"int",
"(",
"device",
")",
"not",
"in",
"self",
".",
"_deviceConfig",
":",
"config",
"=",
"self",
".",
"_deviceConfig",
".",
"setdefault",
"(",
"int",
"(",
"device",
")",
",",
"{",
"}",
")",
"self",
".",
"_deviceCallback",
"(",
"device",
",",
"config",
")",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"info",
"(",
"\"Found device '%s' with configuration: %s\"",
",",
"device",
",",
"config",
")",
"self",
".",
"_serial",
".",
"timeout",
"=",
"tmpTimeout"
] |
Find all the devices on the serial bus and store the results in a
class member object.
|
[
"Find",
"all",
"the",
"devices",
"on",
"the",
"serial",
"buss",
"and",
"store",
"the",
"results",
"in",
"a",
"class",
"member",
"object",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L73-L90
|
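`findConnectedDevices` temporarily drops the pyserial timeout to 0.01 s so that probing all 128 candidate device IDs does not stall on addresses with no device behind them, then restores the previous timeout. The same save-and-restore idea as a reusable context manager (FakePort is only a stand-in for a real serial.Serial object):

from contextlib import contextmanager

@contextmanager
def temporary_attr(obj, name, value):
    """Temporarily override obj.<name>, restoring the old value afterwards."""
    old = getattr(obj, name)
    setattr(obj, name, value)
    try:
        yield obj
    finally:
        setattr(obj, name, old)

class FakePort:
    timeout = 2.0

port = FakePort()
with temporary_attr(port, "timeout", 0.01):
    print("probing with timeout", port.timeout)   # 0.01 while scanning IDs 0..127
print("restored timeout", port.timeout)           # 2.0 again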
239,478
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik.setCompactProtocol
|
def setCompactProtocol(self):
"""
Set the compact protocol.
"""
self._compact = True
self._serial.write(bytes(self._BAUD_DETECT))
self._log and self._log.debug("Compact protocol has been set.")
|
python
|
def setCompactProtocol(self):
"""
Set the compact protocol.
"""
self._compact = True
self._serial.write(bytes(self._BAUD_DETECT))
self._log and self._log.debug("Compact protocol has been set.")
|
[
"def",
"setCompactProtocol",
"(",
"self",
")",
":",
"self",
".",
"_compact",
"=",
"True",
"self",
".",
"_serial",
".",
"write",
"(",
"bytes",
"(",
"self",
".",
"_BAUD_DETECT",
")",
")",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"debug",
"(",
"\"Compact protocol has been set.\"",
")"
] |
Set the compact protocol.
|
[
"Set",
"the",
"compact",
"protocol",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L121-L127
|
239,479
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik.setPololuProtocol
|
def setPololuProtocol(self):
"""
Set the pololu protocol.
"""
self._compact = False
self._log and self._log.debug("Pololu protocol has been set.")
|
python
|
def setPololuProtocol(self):
"""
Set the pololu protocol.
"""
self._compact = False
self._log and self._log.debug("Pololu protocol has been set.")
|
[
"def",
"setPololuProtocol",
"(",
"self",
")",
":",
"self",
".",
"_compact",
"=",
"False",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"debug",
"(",
"\"Pololu protocol has been set.\"",
")"
] |
Set the pololu protocol.
|
[
"Set",
"the",
"pololu",
"protocol",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L139-L144
|
239,480
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._writeData
|
def _writeData(self, command, device, params=()):
"""
Write the data to the device.
:Parameters:
command : `int`
The command to write to the device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
params : `tuple`
Sequence of bytes to write.
:Exceptions:
* `SerialTimeoutException`
If the low level serial package times out.
* `SerialException`
IO error when the port is not open.
"""
sequence = []
if self._compact:
sequence.append(command | 0x80)
else:
sequence.append(self._BAUD_DETECT)
sequence.append(device)
sequence.append(command)
for param in params:
sequence.append(param)
if self._crc:
sequence.append(crc7(sequence))
self._serial.write(bytearray(sequence))
self._log and self._log.debug("Wrote byte sequence: %s",
[hex(num) for num in sequence])
|
python
|
def _writeData(self, command, device, params=()):
"""
Write the data to the device.
:Parameters:
command : `int`
The command to write to the device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
params : `tuple`
Sequence of bytes to write.
:Exceptions:
* `SerialTimeoutException`
If the low level serial package times out.
* `SerialException`
IO error when the port is not open.
"""
sequence = []
if self._compact:
sequence.append(command | 0x80)
else:
sequence.append(self._BAUD_DETECT)
sequence.append(device)
sequence.append(command)
for param in params:
sequence.append(param)
if self._crc:
sequence.append(crc7(sequence))
self._serial.write(bytearray(sequence))
self._log and self._log.debug("Wrote byte sequence: %s",
[hex(num) for num in sequence])
|
[
"def",
"_writeData",
"(",
"self",
",",
"command",
",",
"device",
",",
"params",
"=",
"(",
")",
")",
":",
"sequence",
"=",
"[",
"]",
"if",
"self",
".",
"_compact",
":",
"sequence",
".",
"append",
"(",
"command",
"|",
"0x80",
")",
"else",
":",
"sequence",
".",
"append",
"(",
"self",
".",
"_BAUD_DETECT",
")",
"sequence",
".",
"append",
"(",
"device",
")",
"sequence",
".",
"append",
"(",
"command",
")",
"for",
"param",
"in",
"params",
":",
"sequence",
".",
"append",
"(",
"param",
")",
"if",
"self",
".",
"_crc",
":",
"sequence",
".",
"append",
"(",
"crc7",
"(",
"sequence",
")",
")",
"self",
".",
"_serial",
".",
"write",
"(",
"bytearray",
"(",
"sequence",
")",
")",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"debug",
"(",
"\"Wrote byte sequence: %s\"",
",",
"[",
"hex",
"(",
"num",
")",
"for",
"num",
"in",
"sequence",
"]",
")"
] |
Write the data to the device.
:Parameters:
command : `int`
The command to write to the device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
params : `tuple`
Sequence of bytes to write.
:Exceptions:
* `SerialTimeoutException`
If the low level serial package times out.
* `SerialException`
IO error when the port is not open.
|
[
"Write",
"the",
"data",
"to",
"the",
"device",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L175-L211
|
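`_writeData` shows the two wire formats: in compact mode the command byte is sent with its high bit set, while in Pololu mode the frame starts with the baud-detect byte and the device ID so several controllers can share one serial line; a CRC-7 byte is appended when CRC is enabled. A sketch of just that framing, with the CRC step left out (the command value 0x08 and the 0xAA start byte are illustrative assumptions, not taken from the class):

def frame_command(command, params=(), device=None, baud_detect=0xAA):
    """Build the byte sequence described above (CRC byte omitted)."""
    if device is None:                      # compact protocol
        seq = [command | 0x80]
    else:                                   # Pololu protocol, addressed
        seq = [baud_detect, device, command]
    seq.extend(params)
    return bytearray(seq)

print(frame_command(0x08, params=(0x64,)).hex())               # compact: '8864'
print(frame_command(0x08, params=(0x64,), device=0x0A).hex())  # pololu:  'aa0a0864'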
239,481
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._getFirmwareVersion
|
def _getFirmwareVersion(self, device):
"""
Get the firmware version.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer indicating the version number.
"""
cmd = self._COMMAND.get('get-fw-version')
self._writeData(cmd, device)
try:
result = self._serial.read(size=1)
result = int(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except ValueError as e:
result = None
return result
|
python
|
def _getFirmwareVersion(self, device):
"""
Get the firmware version.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer indicating the version number.
"""
cmd = self._COMMAND.get('get-fw-version')
self._writeData(cmd, device)
try:
result = self._serial.read(size=1)
result = int(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except ValueError as e:
result = None
return result
|
[
"def",
"_getFirmwareVersion",
"(",
"self",
",",
"device",
")",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'get-fw-version'",
")",
"self",
".",
"_writeData",
"(",
"cmd",
",",
"device",
")",
"try",
":",
"result",
"=",
"self",
".",
"_serial",
".",
"read",
"(",
"size",
"=",
"1",
")",
"result",
"=",
"int",
"(",
"result",
")",
"except",
"serial",
".",
"SerialException",
"as",
"e",
":",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"error",
"(",
"\"Error: %s\"",
",",
"e",
",",
"exc_info",
"=",
"True",
")",
"raise",
"e",
"except",
"ValueError",
"as",
"e",
":",
"result",
"=",
"None",
"return",
"result"
] |
Get the firmware version.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer indicating the version number.
|
[
"Get",
"the",
"firmware",
"version",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L213-L237
|
239,482
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._getError
|
def _getError(self, device, message):
"""
Get the error message or value stored in the Qik hardware.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A list of text messages, integers, or an empty list. See the
`message` parameter above.
"""
cmd = self._COMMAND.get('get-error')
self._writeData(cmd, device)
result = []
bits = []
try:
num = self._serial.read(size=1)
num = ord(num)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
num = 0
for i in range(7, -1, -1):
bit = num & (1 << i)
if bit:
if message:
result.append(self._ERRORS.get(bit))
else:
result.append(bit)
return result
|
python
|
def _getError(self, device, message):
"""
Get the error message or value stored in the Qik hardware.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A list of text messages, integers, or an empty list. See the
`message` parameter above.
"""
cmd = self._COMMAND.get('get-error')
self._writeData(cmd, device)
result = []
bits = []
try:
num = self._serial.read(size=1)
num = ord(num)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
num = 0
for i in range(7, -1, -1):
bit = num & (1 << i)
if bit:
if message:
result.append(self._ERRORS.get(bit))
else:
result.append(bit)
return result
|
[
"def",
"_getError",
"(",
"self",
",",
"device",
",",
"message",
")",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'get-error'",
")",
"self",
".",
"_writeData",
"(",
"cmd",
",",
"device",
")",
"result",
"=",
"[",
"]",
"bits",
"=",
"[",
"]",
"try",
":",
"num",
"=",
"self",
".",
"_serial",
".",
"read",
"(",
"size",
"=",
"1",
")",
"num",
"=",
"ord",
"(",
"num",
")",
"except",
"serial",
".",
"SerialException",
"as",
"e",
":",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"error",
"(",
"\"Error: %s\"",
",",
"e",
",",
"exc_info",
"=",
"True",
")",
"raise",
"e",
"except",
"TypeError",
"as",
"e",
":",
"num",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"7",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"bit",
"=",
"num",
"&",
"(",
"1",
"<<",
"i",
")",
"if",
"bit",
":",
"if",
"message",
":",
"result",
".",
"append",
"(",
"self",
".",
"_ERRORS",
".",
"get",
"(",
"bit",
")",
")",
"else",
":",
"result",
".",
"append",
"(",
"bit",
")",
"return",
"result"
] |
Get the error message or value stored in the Qik hardware.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A list of text messages, integers, or an empty list. See the
`message` parameter above.
|
[
"Get",
"the",
"error",
"message",
"or",
"value",
"stored",
"in",
"the",
"Qik",
"hardware",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L239-L278
|
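`_getError` reads one status byte and walks its bits from 7 down to 0, mapping each set bit through the `_ERRORS` table when a text message was requested. The same decoding as a standalone helper, with placeholder labels instead of the real table:

ERRORS = {1 << i: "error bit {}".format(i) for i in range(8)}   # placeholder labels

def decode_error_byte(num, message=True):
    """Collect the set bits of num, most significant first."""
    result = []
    for i in range(7, -1, -1):
        bit = num & (1 << i)
        if bit:
            result.append(ERRORS.get(bit) if message else bit)
    return result

print(decode_error_byte(0b10010000))         # ['error bit 7', 'error bit 4']
print(decode_error_byte(0b10010000, False))  # [128, 16]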
239,483
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._getConfig
|
def _getConfig(self, num, device):
"""
Low level method used for all get config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer representing the value stored in the hardware device.
"""
cmd = self._COMMAND.get('get-config')
self._writeData(cmd, device, params=(num,))
try:
result = self._serial.read(size=1)
result = ord(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
result = None
return result
|
python
|
def _getConfig(self, num, device):
"""
Low level method used for all get config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer representing the value stored in the hardware device.
"""
cmd = self._COMMAND.get('get-config')
self._writeData(cmd, device, params=(num,))
try:
result = self._serial.read(size=1)
result = ord(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
result = None
return result
|
[
"def",
"_getConfig",
"(",
"self",
",",
"num",
",",
"device",
")",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'get-config'",
")",
"self",
".",
"_writeData",
"(",
"cmd",
",",
"device",
",",
"params",
"=",
"(",
"num",
",",
")",
")",
"try",
":",
"result",
"=",
"self",
".",
"_serial",
".",
"read",
"(",
"size",
"=",
"1",
")",
"result",
"=",
"ord",
"(",
"result",
")",
"except",
"serial",
".",
"SerialException",
"as",
"e",
":",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"error",
"(",
"\"Error: %s\"",
",",
"e",
",",
"exc_info",
"=",
"True",
")",
"raise",
"e",
"except",
"TypeError",
"as",
"e",
":",
"result",
"=",
"None",
"return",
"result"
] |
Low level method used for all get config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
An integer representing the value stored in the hardware device.
|
[
"Low",
"level",
"method",
"used",
"for",
"all",
"get",
"config",
"commands",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L280-L306
|
239,484
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._getPWMFrequency
|
def _getPWMFrequency(self, device, message):
"""
Get the PWM frequency stored on the hardware device.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
"""
result = self._getConfig(self.PWM_PARAM, device)
freq, msg = self._CONFIG_PWM.get(result, (result, 'Invalid Frequency'))
if message:
result = msg
else:
result = freq
return result
|
python
|
def _getPWMFrequency(self, device, message):
"""
Get the PWM frequency stored on the hardware device.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
"""
result = self._getConfig(self.PWM_PARAM, device)
freq, msg = self._CONFIG_PWM.get(result, (result, 'Invalid Frequency'))
if message:
result = msg
else:
result = freq
return result
|
[
"def",
"_getPWMFrequency",
"(",
"self",
",",
"device",
",",
"message",
")",
":",
"result",
"=",
"self",
".",
"_getConfig",
"(",
"self",
".",
"PWM_PARAM",
",",
"device",
")",
"freq",
",",
"msg",
"=",
"self",
".",
"_CONFIG_PWM",
".",
"get",
"(",
"result",
",",
"(",
"result",
",",
"'Invalid Frequency'",
")",
")",
"if",
"message",
":",
"result",
"=",
"msg",
"else",
":",
"result",
"=",
"freq",
"return",
"result"
] |
Get the PWM frequency stored on the hardware device.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
|
[
"Get",
"the",
"PWM",
"frequency",
"stored",
"on",
"the",
"hardware",
"device",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L322-L345
|
239,485
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._getSerialTimeout
|
def _getSerialTimeout(self, device):
"""
Get the serial timeout stored on the hardware device.
Caution, more than one value returned from the Qik can have the same
actual timeout value according to the formula below. I have verified
this as an idiosyncrasy of the Qik itself. There are only a total of
72 unique values that the Qik can logically use; the remaining 56
values are repeats of the 72.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
The timeout value in seconds.
"""
num = self._getConfig(self.SERIAL_TIMEOUT, device)
if isinstance(num, int):
x = num & 0x0F
y = (num >> 4) & 0x07
result = self.DEFAULT_SERIAL_TIMEOUT * x * pow(2, y)
else:
result = num
return result
|
python
|
def _getSerialTimeout(self, device):
"""
Get the serial timeout stored on the hardware device.
Caution, more than one value returned from the Qik can have the same
actual timeout value according to the formula below. I have verified
this as an idiosyncrasy of the Qik itself. There are only a total of
72 unique values that the Qik can logically use; the remaining 56
values are repeats of the 72.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
The timeout value in seconds.
"""
num = self._getConfig(self.SERIAL_TIMEOUT, device)
if isinstance(num, int):
x = num & 0x0F
y = (num >> 4) & 0x07
result = self.DEFAULT_SERIAL_TIMEOUT * x * pow(2, y)
else:
result = num
return result
|
[
"def",
"_getSerialTimeout",
"(",
"self",
",",
"device",
")",
":",
"num",
"=",
"self",
".",
"_getConfig",
"(",
"self",
".",
"SERIAL_TIMEOUT",
",",
"device",
")",
"if",
"isinstance",
"(",
"num",
",",
"int",
")",
":",
"x",
"=",
"num",
"&",
"0x0F",
"y",
"=",
"(",
"num",
">>",
"4",
")",
"&",
"0x07",
"result",
"=",
"self",
".",
"DEFAULT_SERIAL_TIMEOUT",
"*",
"x",
"*",
"pow",
"(",
"2",
",",
"y",
")",
"else",
":",
"result",
"=",
"num",
"return",
"result"
] |
Get the serial timeout stored on the hardware device.
Caution, more than one value returned from the Qik can have the same
actual timeout value according to the formula below. I have verified
this as an idiosyncrasy of the Qik itself. There are only a total of
72 unique values that the Qik can logically use; the remaining 56
values are repeats of the 72.
:Parameters:
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
:Returns:
The timeout value in seconds.
|
[
"Get",
"the",
"serial",
"timeout",
"stored",
"on",
"the",
"hardware",
"device",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L361-L388
|
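The aliasing that `_getSerialTimeout` warns about is easy to see once the byte is split into its two fields: distinct stored bytes can decode to the same number of seconds. A tiny demonstration (the base constant is again an arbitrary placeholder):

def decode_timeout(byte, base=1.0):
    x = byte & 0x0F          # 4-bit multiplier
    y = (byte >> 4) & 0x07   # 3-bit exponent
    return base * x * 2 ** y

print(decode_timeout(0x04))  # x=4, y=0 -> 4.0
print(decode_timeout(0x12))  # x=2, y=1 -> 4.0, same timeout, different byte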
239,486
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._setConfig
|
def _setConfig(self, num, value, device, message):
"""
Low level method used for all set config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
value : `int`
The value to set in the hardware device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
:Exceptions:
* `SerialException`
IO error indicating there was a problem reading from the serial
connection.
"""
cmd = self._COMMAND.get('set-config')
self._writeData(cmd, device, params=(num, value, 0x55, 0x2A))
try:
result = self._serial.read(size=1)
result = ord(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
result = None
if result is not None and message:
result = self._CONFIG_RETURN.get(
result, 'Unknown return value: {}'.format(result))
return result
|
python
|
def _setConfig(self, num, value, device, message):
"""
Low level method used for all set config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
value : `int`
The value to set in the hardware device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
:Exceptions:
* `SerialException`
IO error indicating there was a problem reading from the serial
connection.
"""
cmd = self._COMMAND.get('set-config')
self._writeData(cmd, device, params=(num, value, 0x55, 0x2A))
try:
result = self._serial.read(size=1)
result = ord(result)
except serial.SerialException as e:
self._log and self._log.error("Error: %s", e, exc_info=True)
raise e
except TypeError as e:
result = None
if result is not None and message:
result = self._CONFIG_RETURN.get(
result, 'Unknown return value: {}'.format(result))
return result
|
[
"def",
"_setConfig",
"(",
"self",
",",
"num",
",",
"value",
",",
"device",
",",
"message",
")",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'set-config'",
")",
"self",
".",
"_writeData",
"(",
"cmd",
",",
"device",
",",
"params",
"=",
"(",
"num",
",",
"value",
",",
"0x55",
",",
"0x2A",
")",
")",
"try",
":",
"result",
"=",
"self",
".",
"_serial",
".",
"read",
"(",
"size",
"=",
"1",
")",
"result",
"=",
"ord",
"(",
"result",
")",
"except",
"serial",
".",
"SerialException",
"as",
"e",
":",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"error",
"(",
"\"Error: %s\"",
",",
"e",
",",
"exc_info",
"=",
"True",
")",
"raise",
"e",
"except",
"TypeError",
"as",
"e",
":",
"result",
"=",
"None",
"if",
"result",
"is",
"not",
"None",
"and",
"message",
":",
"result",
"=",
"self",
".",
"_CONFIG_RETURN",
".",
"get",
"(",
"result",
",",
"'Unknown return value: {}'",
".",
"format",
"(",
"result",
")",
")",
"return",
"result"
] |
Low level method used for all set config commands.
:Parameters:
num : `int`
Number that indicates the config option to get from the hardware.
value : `int`
The value to set in the hardware device.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
message : `bool`
If set to `True` a text message will be returned, if set to `False`
the integer stored in the Qik will be returned.
:Returns:
A text message or an int. See the `message` parameter above.
:Exceptions:
* `SerialException`
IO error indicating there was a problem reading from the serial
connection.
|
[
"Low",
"level",
"method",
"used",
"for",
"all",
"set",
"config",
"commands",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L390-L430
|
239,487
|
cnobile2012/pololu-motors
|
pololu/motors/qik.py
|
Qik._setSpeed
|
def _setSpeed(self, speed, motor, device):
"""
Set motor speed. This method takes into consideration the PWM frequency
that the hardware is currently running at and limits the values passed
to the hardware accordingly.
:Parameters:
speed : `int`
Motor speed as an integer. Negative numbers indicate reverse
speeds.
motor : `str`
A string value indicating the motor to set the speed on.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
"""
reverse = False
if speed < 0:
speed = -speed
reverse = True
# 0 and 2 for Qik 2s9v1, 0, 2, and 4 for 2s12v10
if self._deviceConfig[device]['pwm'] in (0, 2, 4,) and speed > 127:
speed = 127
if speed > 127:
if speed > 255:
speed = 255
if reverse:
cmd = self._COMMAND.get('{}-reverse-8bit'.format(motor))
else:
cmd = self._COMMAND.get('{}-forward-8bit'.format(motor))
speed -= 128
else:
if reverse:
cmd = self._COMMAND.get('{}-reverse-7bit'.format(motor))
else:
cmd = self._COMMAND.get('{}-forward-7bit'.format(motor))
if not cmd:
msg = "Invalid motor specified: {}".format(motor)
self._log and self._log.error(msg)
raise ValueError(msg)
self._writeData(cmd, device, params=(speed,))
|
python
|
def _setSpeed(self, speed, motor, device):
"""
Set motor speed. This method takes into consideration the PWM frequency
that the hardware is currently running at and limits the values passed
to the hardware accordingly.
:Parameters:
speed : `int`
Motor speed as an integer. Negative numbers indicate reverse
speeds.
motor : `str`
A string value indicating the motor to set the speed on.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
"""
reverse = False
if speed < 0:
speed = -speed
reverse = True
# 0 and 2 for Qik 2s9v1, 0, 2, and 4 for 2s12v10
if self._deviceConfig[device]['pwm'] in (0, 2, 4,) and speed > 127:
speed = 127
if speed > 127:
if speed > 255:
speed = 255
if reverse:
cmd = self._COMMAND.get('{}-reverse-8bit'.format(motor))
else:
cmd = self._COMMAND.get('{}-forward-8bit'.format(motor))
speed -= 128
else:
if reverse:
cmd = self._COMMAND.get('{}-reverse-7bit'.format(motor))
else:
cmd = self._COMMAND.get('{}-forward-7bit'.format(motor))
if not cmd:
msg = "Invalid motor specified: {}".format(motor)
self._log and self._log.error(msg)
raise ValueError(msg)
self._writeData(cmd, device, params=(speed,))
|
[
"def",
"_setSpeed",
"(",
"self",
",",
"speed",
",",
"motor",
",",
"device",
")",
":",
"reverse",
"=",
"False",
"if",
"speed",
"<",
"0",
":",
"speed",
"=",
"-",
"speed",
"reverse",
"=",
"True",
"# 0 and 2 for Qik 2s9v1, 0, 2, and 4 for 2s12v10",
"if",
"self",
".",
"_deviceConfig",
"[",
"device",
"]",
"[",
"'pwm'",
"]",
"in",
"(",
"0",
",",
"2",
",",
"4",
",",
")",
"and",
"speed",
">",
"127",
":",
"speed",
"=",
"127",
"if",
"speed",
">",
"127",
":",
"if",
"speed",
">",
"255",
":",
"speed",
"=",
"255",
"if",
"reverse",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'{}-reverse-8bit'",
".",
"format",
"(",
"motor",
")",
")",
"else",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'{}-forward-8bit'",
".",
"format",
"(",
"motor",
")",
")",
"speed",
"-=",
"128",
"else",
":",
"if",
"reverse",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'{}-reverse-7bit'",
".",
"format",
"(",
"motor",
")",
")",
"else",
":",
"cmd",
"=",
"self",
".",
"_COMMAND",
".",
"get",
"(",
"'{}-forward-7bit'",
".",
"format",
"(",
"motor",
")",
")",
"if",
"not",
"cmd",
":",
"msg",
"=",
"\"Invalid motor specified: {}\"",
".",
"format",
"(",
"motor",
")",
"self",
".",
"_log",
"and",
"self",
".",
"_log",
".",
"error",
"(",
"msg",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"self",
".",
"_writeData",
"(",
"cmd",
",",
"device",
",",
"params",
"=",
"(",
"speed",
",",
")",
")"
] |
Set motor speed. This method takes into consideration the PWM frequency
that the hardware is currently running at and limits the values passed
to the hardware accordingly.
:Parameters:
speed : `int`
Motor speed as an integer. Negative numbers indicate reverse
speeds.
motor : `str`
A string value indicating the motor to set the speed on.
device : `int`
The device is the integer number of the hardware devices ID and
is only used with the Pololu Protocol.
|
[
"Set",
"motor",
"speed",
".",
"This",
"method",
"takes",
"into",
"consideration",
"the",
"PWM",
"frequency",
"that",
"the",
"hardware",
"is",
"currently",
"running",
"at",
"and",
"limits",
"the",
"values",
"passed",
"to",
"the",
"hardware",
"accordingly",
"."
] |
453d2283a63cfe15cda96cad6dffa73372d52a7c
|
https://github.com/cnobile2012/pololu-motors/blob/453d2283a63cfe15cda96cad6dffa73372d52a7c/pololu/motors/qik.py#L558-L605
|
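`_setSpeed` folds the sign into a direction flag, then picks between the 7-bit and 8-bit command variants, subtracting 128 in the 8-bit case so the byte on the wire stays in 0..127; the low-resolution PWM settings cap the request at 127 outright. The same decision tree as a pure function (the 'm0' prefix and the keyword flag are illustrative stand-ins for the real _COMMAND keys and PWM check):

def speed_to_command(speed, motor="m0", high_res_pwm=True):
    """Return the command name and the speed byte _setSpeed would send."""
    reverse = speed < 0
    speed = abs(speed)
    if not high_res_pwm and speed > 127:
        speed = 127                      # low-resolution PWM modes cannot go higher
    if speed > 127:                      # 8-bit range, 128..255
        speed = min(speed, 255)
        name = "{}-{}-8bit".format(motor, "reverse" if reverse else "forward")
        return name, speed - 128         # hardware expects 0..127 here
    name = "{}-{}-7bit".format(motor, "reverse" if reverse else "forward")
    return name, speed

print(speed_to_command(200))                       # ('m0-forward-8bit', 72)
print(speed_to_command(-90))                       # ('m0-reverse-7bit', 90)
print(speed_to_command(200, high_res_pwm=False))   # ('m0-forward-7bit', 127)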
239,488
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_key_names
|
def get_key_names(self):
"""
Gets keys of all elements stored in this map.
:return: a list with all map keys.
"""
names = []
for (k, _) in self.items():
names.append(k)
return names
|
python
|
def get_key_names(self):
"""
Gets keys of all elements stored in this map.
:return: a list with all map keys.
"""
names = []
for (k, _) in self.items():
names.append(k)
return names
|
[
"def",
"get_key_names",
"(",
"self",
")",
":",
"names",
"=",
"[",
"]",
"for",
"(",
"k",
",",
"_",
")",
"in",
"self",
".",
"items",
"(",
")",
":",
"names",
".",
"append",
"(",
"k",
")",
"return",
"names"
] |
Gets keys of all elements stored in this map.
:return: a list with all map keys.
|
[
"Gets",
"keys",
"of",
"all",
"elements",
"stored",
"in",
"this",
"map",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L46-L55
|
239,489
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.append
|
def append(self, map):
"""
Appends new elements to this map.
:param map: a map with elements to be added.
"""
if isinstance(map, dict):
for (k, v) in map.items():
key = StringConverter.to_string(k)
value = v
self.put(key, value)
|
python
|
def append(self, map):
"""
Appends new elements to this map.
:param map: a map with elements to be added.
"""
if isinstance(map, dict):
for (k, v) in map.items():
key = StringConverter.to_string(k)
value = v
self.put(key, value)
|
[
"def",
"append",
"(",
"self",
",",
"map",
")",
":",
"if",
"isinstance",
"(",
"map",
",",
"dict",
")",
":",
"for",
"(",
"k",
",",
"v",
")",
"in",
"map",
".",
"items",
"(",
")",
":",
"key",
"=",
"StringConverter",
".",
"to_string",
"(",
"k",
")",
"value",
"=",
"v",
"self",
".",
"put",
"(",
"key",
",",
"value",
")"
] |
Appends new elements to this map.
:param map: a map with elements to be added.
|
[
"Appends",
"new",
"elements",
"to",
"this",
"map",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L86-L96
|
239,490
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.set_as_object
|
def set_as_object(self, value):
"""
Sets a new value to map element
:param value: a new element or map value.
"""
self.clear()
map = MapConverter.to_map(value)
self.append(map)
|
python
|
def set_as_object(self, value):
"""
Sets a new value to map element
:param value: a new element or map value.
"""
self.clear()
map = MapConverter.to_map(value)
self.append(map)
|
[
"def",
"set_as_object",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"clear",
"(",
")",
"map",
"=",
"MapConverter",
".",
"to_map",
"(",
"value",
")",
"self",
".",
"append",
"(",
"map",
")"
] |
Sets a new value to map element
:param value: a new element or map value.
|
[
"Sets",
"a",
"new",
"value",
"to",
"map",
"element"
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L112-L120
|
239,491
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_nullable_string
|
def get_as_nullable_string(self, key):
"""
Converts map element into a string or returns None if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or None if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_nullable_string(value)
|
python
|
def get_as_nullable_string(self, key):
"""
Converts map element into a string or returns None if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or None if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_nullable_string(value)
|
[
"def",
"get_as_nullable_string",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"StringConverter",
".",
"to_nullable_string",
"(",
"value",
")"
] |
Converts map element into a string or returns None if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or None if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"string",
"or",
"returns",
"None",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L164-L173
|
239,492
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_string
|
def get_as_string(self, key):
"""
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
:return: string value of the element or "" if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string(value)
|
python
|
def get_as_string(self, key):
"""
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
        :return: string value of the element or "" if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string(value)
|
[
"def",
"get_as_string",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"StringConverter",
".",
"to_string",
"(",
"value",
")"
] |
Converts map element into a string or returns "" if conversion is not possible.
:param key: an index of element to get.
        :return: string value of the element or "" if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"string",
"or",
"returns",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L175-L184
|
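In contrast with the nullable variant, get_as_string falls back to an empty string. A hedged sketch under the same assumptions:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                          # assumed no-argument constructor
m.put("count", 123)
print(repr(m.get_as_string("count")))      # expected '123'
print(repr(m.get_as_string("missing")))    # expected '' per the docstring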
239,493
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_string_with_default
|
def get_as_string_with_default(self, key, default_value):
"""
Converts map element into a string or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: string value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string_with_default(value, default_value)
|
python
|
def get_as_string_with_default(self, key, default_value):
"""
Converts map element into a string or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: string value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return StringConverter.to_string_with_default(value, default_value)
|
[
"def",
"get_as_string_with_default",
"(",
"self",
",",
"key",
",",
"default_value",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"StringConverter",
".",
"to_string_with_default",
"(",
"value",
",",
"default_value",
")"
] |
Converts map element into a string or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: string value of the element or default value if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"string",
"or",
"returns",
"default",
"value",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L186-L197
|
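The _with_default variant lets the caller choose the fallback instead of "". A sketch, again assuming the no-argument constructor and inferred import path:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                                        # assumed no-argument constructor
print(m.get_as_string_with_default("missing", "n/a"))    # expected "n/a", the supplied default
m.put("name", "phil")
print(m.get_as_string_with_default("name", "n/a"))       # expected "phil"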
239,494
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_nullable_boolean
|
def get_as_nullable_boolean(self, key):
"""
Converts map element into a boolean or returns None if conversion is not possible
:param key: an index of element to get.
:return: boolean value of the element or None if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_nullable_boolean(value)
|
python
|
def get_as_nullable_boolean(self, key):
"""
Converts map element into a boolean or returns None if conversion is not possible
:param key: an index of element to get.
:return: boolean value of the element or None if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_nullable_boolean(value)
|
[
"def",
"get_as_nullable_boolean",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"BooleanConverter",
".",
"to_nullable_boolean",
"(",
"value",
")"
] |
Converts map element into a boolean or returns None if conversion is not possible
:param key: an index of element to get.
:return: boolean value of the element or None if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"boolean",
"or",
"returns",
"None",
"if",
"conversion",
"is",
"not",
"possible"
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L199-L208
|
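A sketch of the nullable boolean getter. The comment about "true" being recognized reflects typical BooleanConverter behavior in this library and is an assumption, not something stated in the record; constructor and import path are assumed as before.

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                               # assumed no-argument constructor
m.put("enabled", "true")
print(m.get_as_nullable_boolean("enabled"))     # expected True if BooleanConverter recognizes "true" (assumption)
print(m.get_as_nullable_boolean("missing"))     # expected None per the docstring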
239,495
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_boolean
|
def get_as_boolean(self, key):
"""
Converts map element into a boolean or returns false if conversion is not possible.
:param key: an index of element to get.
        :return: boolean value of the element or false if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_boolean(value)
|
python
|
def get_as_boolean(self, key):
"""
Converts map element into a boolean or returns false if conversion is not possible.
:param key: an index of element to get.
        :return: boolean value of the element or false if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_boolean(value)
|
[
"def",
"get_as_boolean",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"BooleanConverter",
".",
"to_boolean",
"(",
"value",
")"
] |
Converts map element into a boolean or returns false if conversion is not possible.
:param key: an index of element to get.
        :return: boolean value of the element or false if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"boolean",
"or",
"returns",
"false",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L210-L219
|
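get_as_boolean is the non-nullable counterpart and falls back to false. Sketch under the same assumptions:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                       # assumed no-argument constructor
print(m.get_as_boolean("missing"))      # expected False per the docstring
m.put("debug", 1)
print(m.get_as_boolean("debug"))        # expected True if BooleanConverter treats 1 as truthy (assumption)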
239,496
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_boolean_with_default
|
def get_as_boolean_with_default(self, key, default_value):
"""
Converts map element into a boolean or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: boolean value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_boolean_with_default(value, default_value)
|
python
|
def get_as_boolean_with_default(self, key, default_value):
"""
Converts map element into a boolean or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: boolean value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return BooleanConverter.to_boolean_with_default(value, default_value)
|
[
"def",
"get_as_boolean_with_default",
"(",
"self",
",",
"key",
",",
"default_value",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"BooleanConverter",
".",
"to_boolean_with_default",
"(",
"value",
",",
"default_value",
")"
] |
Converts map element into a boolean or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: boolean value of the element or default value if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"a",
"boolean",
"or",
"returns",
"default",
"value",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L221-L232
|
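With the _with_default variant the fallback is caller-supplied rather than false. Same assumptions as above:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                                         # assumed no-argument constructor
print(m.get_as_boolean_with_default("missing", True))     # expected True, the supplied default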
239,497
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_nullable_integer
|
def get_as_nullable_integer(self, key):
"""
Converts map element into an integer or returns None if conversion is not possible.
:param key: an index of element to get.
:return: integer value of the element or None if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_nullable_integer(value)
|
python
|
def get_as_nullable_integer(self, key):
"""
Converts map element into an integer or returns None if conversion is not possible.
:param key: an index of element to get.
:return: integer value of the element or None if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_nullable_integer(value)
|
[
"def",
"get_as_nullable_integer",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"IntegerConverter",
".",
"to_nullable_integer",
"(",
"value",
")"
] |
Converts map element into an integer or returns None if conversion is not possible.
:param key: an index of element to get.
:return: integer value of the element or None if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"an",
"integer",
"or",
"returns",
"None",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L234-L243
|
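A sketch of the nullable integer getter: numeric strings are converted, and anything unconvertible yields None per the docstring. Constructor and import path remain assumptions.

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                               # assumed no-argument constructor
m.put("port", "8080")
print(m.get_as_nullable_integer("port"))        # expected 8080
print(m.get_as_nullable_integer("missing"))     # expected None per the docstring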
239,498
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_integer
|
def get_as_integer(self, key):
"""
Converts map element into an integer or returns 0 if conversion is not possible.
:param key: an index of element to get.
        :return: integer value of the element or 0 if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_integer(value)
|
python
|
def get_as_integer(self, key):
"""
Converts map element into an integer or returns 0 if conversion is not possible.
:param key: an index of element to get.
        :return: integer value of the element or 0 if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_integer(value)
|
[
"def",
"get_as_integer",
"(",
"self",
",",
"key",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"IntegerConverter",
".",
"to_integer",
"(",
"value",
")"
] |
Converts map element into an integer or returns 0 if conversion is not possible.
:param key: an index of element to get.
        :return: integer value of the element or 0 if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"an",
"integer",
"or",
"returns",
"0",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L245-L254
|
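The non-nullable integer getter falls back to 0. Sketch under the same assumptions:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                        # assumed no-argument constructor
print(m.get_as_integer("missing"))       # expected 0 per the docstring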
239,499
|
pip-services3-python/pip-services3-commons-python
|
pip_services3_commons/data/AnyValueMap.py
|
AnyValueMap.get_as_integer_with_default
|
def get_as_integer_with_default(self, key, default_value):
"""
Converts map element into an integer or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: integer value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_integer_with_default(value, default_value)
|
python
|
def get_as_integer_with_default(self, key, default_value):
"""
Converts map element into an integer or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: integer value of the element or default value if conversion is not supported.
"""
value = self.get(key)
return IntegerConverter.to_integer_with_default(value, default_value)
|
[
"def",
"get_as_integer_with_default",
"(",
"self",
",",
"key",
",",
"default_value",
")",
":",
"value",
"=",
"self",
".",
"get",
"(",
"key",
")",
"return",
"IntegerConverter",
".",
"to_integer_with_default",
"(",
"value",
",",
"default_value",
")"
] |
Converts map element into an integer or returns default value if conversion is not possible.
:param key: an index of element to get.
:param default_value: the default value
        :return: integer value of the element or default value if conversion is not supported.
|
[
"Converts",
"map",
"element",
"into",
"an",
"integer",
"or",
"returns",
"default",
"value",
"if",
"conversion",
"is",
"not",
"possible",
"."
] |
22cbbb3e91e49717f65c083d36147fdb07ba9e3b
|
https://github.com/pip-services3-python/pip-services3-commons-python/blob/22cbbb3e91e49717f65c083d36147fdb07ba9e3b/pip_services3_commons/data/AnyValueMap.py#L256-L267
|
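And the _with_default variant returns the supplied fallback instead of 0. Final sketch, with the same assumed constructor and import path:

from pip_services3_commons.data.AnyValueMap import AnyValueMap

m = AnyValueMap()                                      # assumed no-argument constructor
print(m.get_as_integer_with_default("missing", -1))    # expected -1, the supplied default
m.put("retries", "3")
print(m.get_as_integer_with_default("retries", -1))    # expected 3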