id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
245,800
minhhoit/yacms
yacms/template/__init__.py
Library.inclusion_tag
def inclusion_tag(self, name, context_class=Context, takes_context=False): """ Replacement for Django's ``inclusion_tag`` which looks up device specific templates at render time. """ def tag_decorator(tag_func): @wraps(tag_func) def tag_wrapper(parser, token): class InclusionTagNode(template.Node): def render(self, context): if not getattr(self, "nodelist", False): try: request = context["request"] except KeyError: t = get_template(name) else: ts = templates_for_device(request, name) t = select_template(ts) self.template = t parts = [template.Variable(part).resolve(context) for part in token.split_contents()[1:]] if takes_context: parts.insert(0, context) result = tag_func(*parts) autoescape = context.autoescape context = context_class(result, autoescape=autoescape) return self.template.render(context) return InclusionTagNode() return self.tag(tag_wrapper) return tag_decorator
python
def inclusion_tag(self, name, context_class=Context, takes_context=False):
    """
    Replacement for Django's ``inclusion_tag`` which looks up device
    specific templates at render time.

    :param name: template name (or base name) to render for the tag.
    :param context_class: context class used to wrap the tag function's
        result before rendering (defaults to ``Context``).
    :param takes_context: when True, the rendering context is passed as
        the first positional argument to the wrapped tag function.
    :return: a decorator that registers the wrapped function as a tag.
    """
    def tag_decorator(tag_func):
        @wraps(tag_func)
        def tag_wrapper(parser, token):
            class InclusionTagNode(template.Node):
                def render(self, context):
                    # NOTE(review): this guard checks "nodelist", which this
                    # node never sets, so template selection presumably runs
                    # on every render -- confirm this is intentional.
                    if not getattr(self, "nodelist", False):
                        try:
                            request = context["request"]
                        except KeyError:
                            # No request in context: fall back to the plain
                            # (non device-specific) template.
                            t = get_template(name)
                        else:
                            # Pick the best template for the requesting device.
                            ts = templates_for_device(request, name)
                            t = select_template(ts)
                        self.template = t
                    # Resolve the tag's arguments against the current context.
                    parts = [template.Variable(part).resolve(context)
                             for part in token.split_contents()[1:]]
                    if takes_context:
                        parts.insert(0, context)
                    result = tag_func(*parts)
                    # Re-wrap the tag function's result dict in a fresh
                    # context, preserving the autoescape setting.
                    autoescape = context.autoescape
                    context = context_class(result, autoescape=autoescape)
                    return self.template.render(context)
            return InclusionTagNode()
        return self.tag(tag_wrapper)
    return tag_decorator
[ "def", "inclusion_tag", "(", "self", ",", "name", ",", "context_class", "=", "Context", ",", "takes_context", "=", "False", ")", ":", "def", "tag_decorator", "(", "tag_func", ")", ":", "@", "wraps", "(", "tag_func", ")", "def", "tag_wrapper", "(", "parser", ",", "token", ")", ":", "class", "InclusionTagNode", "(", "template", ".", "Node", ")", ":", "def", "render", "(", "self", ",", "context", ")", ":", "if", "not", "getattr", "(", "self", ",", "\"nodelist\"", ",", "False", ")", ":", "try", ":", "request", "=", "context", "[", "\"request\"", "]", "except", "KeyError", ":", "t", "=", "get_template", "(", "name", ")", "else", ":", "ts", "=", "templates_for_device", "(", "request", ",", "name", ")", "t", "=", "select_template", "(", "ts", ")", "self", ".", "template", "=", "t", "parts", "=", "[", "template", ".", "Variable", "(", "part", ")", ".", "resolve", "(", "context", ")", "for", "part", "in", "token", ".", "split_contents", "(", ")", "[", "1", ":", "]", "]", "if", "takes_context", ":", "parts", ".", "insert", "(", "0", ",", "context", ")", "result", "=", "tag_func", "(", "*", "parts", ")", "autoescape", "=", "context", ".", "autoescape", "context", "=", "context_class", "(", "result", ",", "autoescape", "=", "autoescape", ")", "return", "self", ".", "template", ".", "render", "(", "context", ")", "return", "InclusionTagNode", "(", ")", "return", "self", ".", "tag", "(", "tag_wrapper", ")", "return", "tag_decorator" ]
Replacement for Django's ``inclusion_tag`` which looks up device specific templates at render time.
[ "Replacement", "for", "Django", "s", "inclusion_tag", "which", "looks", "up", "device", "specific", "templates", "at", "render", "time", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/template/__init__.py#L92-L126
245,801
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.user_list
def user_list(self, params=None): """Lists all users within the tenant.""" uri = 'openstack/users' if params: uri += '?%s' % urllib.urlencode(params) resp, body = self.get(uri) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def user_list(self, params=None):
    """Return all users within the tenant, optionally filtered by params."""
    query = '' if not params else '?%s' % urllib.urlencode(params)
    resp, raw = self.get('openstack/users' + query)
    self.expected_success(200, resp.status)
    return rest_client.ResponseBody(resp, json.loads(raw))
[ "def", "user_list", "(", "self", ",", "params", "=", "None", ")", ":", "uri", "=", "'openstack/users'", "if", "params", ":", "uri", "+=", "'?%s'", "%", "urllib", ".", "urlencode", "(", "params", ")", "resp", ",", "body", "=", "self", ".", "get", "(", "uri", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Lists all users within the tenant.
[ "Lists", "all", "users", "within", "the", "tenant", "." ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L11-L21
245,802
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.user_invite
def user_invite(self, username, email, roles): """ Invite a user to the tenant. """ uri = 'openstack/users' data = { "username": username, "email": email, "roles": list(set(roles)) } post_body = json.dumps(data) resp, body = self.post(uri, body=post_body) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def user_invite(self, username, email, roles):
    """Invite a user to the tenant with the given (de-duplicated) roles."""
    payload = json.dumps({
        "username": username,
        "email": email,
        "roles": list(set(roles)),
    })
    resp, raw = self.post('openstack/users', body=payload)
    self.expected_success(200, resp.status)
    return rest_client.ResponseBody(resp, json.loads(raw))
[ "def", "user_invite", "(", "self", ",", "username", ",", "email", ",", "roles", ")", ":", "uri", "=", "'openstack/users'", "data", "=", "{", "\"username\"", ":", "username", ",", "\"email\"", ":", "email", ",", "\"roles\"", ":", "list", "(", "set", "(", "roles", ")", ")", "}", "post_body", "=", "json", ".", "dumps", "(", "data", ")", "resp", ",", "body", "=", "self", ".", "post", "(", "uri", ",", "body", "=", "post_body", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Invite a user to the tenant.
[ "Invite", "a", "user", "to", "the", "tenant", "." ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L23-L36
245,803
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.revoke_user
def revoke_user(self, user_id): """ Revoke a user from the tenant This will remove pending or approved roles but will not not delete the user from Keystone. """ uri = 'openstack/users/%s' % user_id try: resp = self.delete(uri) except AttributeError: # note: this breaks. stacktask returns a string, not json. return self.expected_success(200, resp.status) return rest_client.ResponseBody(resp, None)
python
def revoke_user(self, user_id):
    """
    Revoke a user from the tenant.

    This removes pending or approved roles but does not delete the
    user from Keystone.

    :param user_id: id of the user to revoke.
    :return: a ``ResponseBody`` with no parsed body, or None when the
        response could not be handled (see note below).
    """
    uri = 'openstack/users/%s' % user_id
    # NOTE(review): the AttributeError catch papers over self.delete()
    # failing on a non-JSON response; on that path the method silently
    # returns None without checking the status code -- confirm intended.
    try:
        resp = self.delete(uri)
    except AttributeError:
        # note: this breaks. stacktask returns a string, not json.
        return
    self.expected_success(200, resp.status)
    return rest_client.ResponseBody(resp, None)
[ "def", "revoke_user", "(", "self", ",", "user_id", ")", ":", "uri", "=", "'openstack/users/%s'", "%", "user_id", "try", ":", "resp", "=", "self", ".", "delete", "(", "uri", ")", "except", "AttributeError", ":", "# note: this breaks. stacktask returns a string, not json.", "return", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "None", ")" ]
Revoke a user from the tenant This will remove pending or approved roles but will not not delete the user from Keystone.
[ "Revoke", "a", "user", "from", "the", "tenant", "This", "will", "remove", "pending", "or", "approved", "roles", "but", "will", "not", "not", "delete", "the", "user", "from", "Keystone", "." ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L38-L51
245,804
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.get_tokens
def get_tokens(self, filters={}): """ Returns dict of tokens matching the provided filters """ uri = 'tokens' if filters: filters = {'filters': json.dumps(filters)} uri += "?%s" % urllib.urlencode(filters, True) resp, body = self.get(uri) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def get_tokens(self, filters=None):
    """
    Return tokens matching the provided filters.

    :param filters: optional dict of filter criteria; serialized to JSON
        and passed as the ``filters`` query parameter. Defaults to no
        filtering. (Changed from a mutable ``{}`` default -- a known
        Python pitfall -- in a backward-compatible way.)
    :return: a ``ResponseBody`` wrapping the parsed JSON response.
    """
    if filters is None:
        filters = {}
    uri = 'tokens'
    if filters:
        # doseq=True so list-valued filters expand into repeated params.
        filters = {'filters': json.dumps(filters)}
        uri += "?%s" % urllib.urlencode(filters, True)
    resp, body = self.get(uri)
    self.expected_success(200, resp.status)
    body = json.loads(body)
    return rest_client.ResponseBody(resp, body)
[ "def", "get_tokens", "(", "self", ",", "filters", "=", "{", "}", ")", ":", "uri", "=", "'tokens'", "if", "filters", ":", "filters", "=", "{", "'filters'", ":", "json", ".", "dumps", "(", "filters", ")", "}", "uri", "+=", "\"?%s\"", "%", "urllib", ".", "urlencode", "(", "filters", ",", "True", ")", "resp", ",", "body", "=", "self", ".", "get", "(", "uri", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Returns dict of tokens matching the provided filters
[ "Returns", "dict", "of", "tokens", "matching", "the", "provided", "filters" ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L53-L64
245,805
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.token_submit
def token_submit(self, token_id, json_data={}): """ Submits a given token, along with optional data """ uri = 'tokens/%s' % token_id post_body = json.dumps(json_data) resp, body = self.post(uri, post_body) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def token_submit(self, token_id, json_data=None):
    """
    Submit a given token, along with optional data.

    :param token_id: id of the token to submit.
    :param json_data: optional dict posted as the JSON body; defaults to
        an empty payload. (Changed from a mutable ``{}`` default -- a
        known Python pitfall -- in a backward-compatible way.)
    :return: a ``ResponseBody`` wrapping the parsed JSON response.
    """
    if json_data is None:
        json_data = {}
    uri = 'tokens/%s' % token_id
    post_body = json.dumps(json_data)
    resp, body = self.post(uri, post_body)
    self.expected_success(200, resp.status)
    body = json.loads(body)
    return rest_client.ResponseBody(resp, body)
[ "def", "token_submit", "(", "self", ",", "token_id", ",", "json_data", "=", "{", "}", ")", ":", "uri", "=", "'tokens/%s'", "%", "token_id", "post_body", "=", "json", ".", "dumps", "(", "json_data", ")", "resp", ",", "body", "=", "self", ".", "post", "(", "uri", ",", "post_body", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Submits a given token, along with optional data
[ "Submits", "a", "given", "token", "along", "with", "optional", "data" ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L66-L74
245,806
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.approve_task
def approve_task(self, task_id): """ Returns dict of tasks matching the provided filters """ uri = 'tasks/%s' % task_id data = {"approved": True} resp, body = self.post(uri, json.dumps(data)) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def approve_task(self, task_id):
    """
    Approve the task with the given id.

    Posts ``{"approved": True}`` to the task endpoint. (The original
    docstring was copy-pasted from a list call and described filtering,
    which this method does not do.)

    :param task_id: id of the task to approve.
    :return: a ``ResponseBody`` wrapping the parsed JSON response.
    """
    uri = 'tasks/%s' % task_id
    data = {"approved": True}
    resp, body = self.post(uri, json.dumps(data))
    self.expected_success(200, resp.status)
    body = json.loads(body)
    return rest_client.ResponseBody(resp, body)
[ "def", "approve_task", "(", "self", ",", "task_id", ")", ":", "uri", "=", "'tasks/%s'", "%", "task_id", "data", "=", "{", "\"approved\"", ":", "True", "}", "resp", ",", "body", "=", "self", ".", "post", "(", "uri", ",", "json", ".", "dumps", "(", "data", ")", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Returns dict of tasks matching the provided filters
[ "Returns", "dict", "of", "tasks", "matching", "the", "provided", "filters" ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L89-L98
245,807
catalyst/stacktask-tempest
stacktask_tempest_plugin/stacktask_client.py
StacktaskClient.signup
def signup(self, project_name, email): """ Signup for a new project. """ uri = 'openstack/sign-up' data = { "project_name": project_name, "email": email, } post_body = json.dumps(data) resp, body = self.post(uri, body=post_body) self.expected_success(200, resp.status) body = json.loads(body) return rest_client.ResponseBody(resp, body)
python
def signup(self, project_name, email):
    """Sign up for a new project under the given name and contact email."""
    payload = json.dumps({
        "project_name": project_name,
        "email": email,
    })
    resp, raw = self.post('openstack/sign-up', body=payload)
    self.expected_success(200, resp.status)
    return rest_client.ResponseBody(resp, json.loads(raw))
[ "def", "signup", "(", "self", ",", "project_name", ",", "email", ")", ":", "uri", "=", "'openstack/sign-up'", "data", "=", "{", "\"project_name\"", ":", "project_name", ",", "\"email\"", ":", "email", ",", "}", "post_body", "=", "json", ".", "dumps", "(", "data", ")", "resp", ",", "body", "=", "self", ".", "post", "(", "uri", ",", "body", "=", "post_body", ")", "self", ".", "expected_success", "(", "200", ",", "resp", ".", "status", ")", "body", "=", "json", ".", "loads", "(", "body", ")", "return", "rest_client", ".", "ResponseBody", "(", "resp", ",", "body", ")" ]
Signup for a new project.
[ "Signup", "for", "a", "new", "project", "." ]
0171161338a0a02de1fe62b6baaebf3c58f44db6
https://github.com/catalyst/stacktask-tempest/blob/0171161338a0a02de1fe62b6baaebf3c58f44db6/stacktask_tempest_plugin/stacktask_client.py#L100-L112
245,808
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch.get_event
def get_event(self, name, default=_sentinel): """ Lookup an event by name. :param str item: Event name :return Event: Event instance under key """ if name not in self.events: if self.create_events_on_access: self.add_event(name) elif default is not _sentinel: return default return self.events[name]
python
def get_event(self, name, default=_sentinel):
    """
    Look up an event by name.

    Auto-creates missing events when ``create_events_on_access`` is set;
    otherwise returns ``default`` when one was given, or raises KeyError.

    :param str name: Event name
    :return Event: Event instance under key
    """
    if name in self.events:
        return self.events[name]
    if self.create_events_on_access:
        self.add_event(name)
        return self.events[name]
    if default is not _sentinel:
        return default
    # Missing, no auto-create, no default: raise KeyError via the lookup.
    return self.events[name]
[ "def", "get_event", "(", "self", ",", "name", ",", "default", "=", "_sentinel", ")", ":", "if", "name", "not", "in", "self", ".", "events", ":", "if", "self", ".", "create_events_on_access", ":", "self", ".", "add_event", "(", "name", ")", "elif", "default", "is", "not", "_sentinel", ":", "return", "default", "return", "self", ".", "events", "[", "name", "]" ]
Lookup an event by name. :param str item: Event name :return Event: Event instance under key
[ "Lookup", "an", "event", "by", "name", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L116-L128
245,809
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch._add_internal_event
def _add_internal_event(self, name, send_event=False, internal_event_factory=None): """ This is only here to ensure my constant hatred for Python 2's horrid variable argument support. """ if not internal_event_factory: internal_event_factory = self.internal_event_factory return self.add_event(names, send_event=send_event, event_factory=internal_event_factory)
python
def _add_internal_event(self, name, send_event=False, internal_event_factory=None): """ This is only here to ensure my constant hatred for Python 2's horrid variable argument support. """ if not internal_event_factory: internal_event_factory = self.internal_event_factory return self.add_event(names, send_event=send_event, event_factory=internal_event_factory)
[ "def", "_add_internal_event", "(", "self", ",", "name", ",", "send_event", "=", "False", ",", "internal_event_factory", "=", "None", ")", ":", "if", "not", "internal_event_factory", ":", "internal_event_factory", "=", "self", ".", "internal_event_factory", "return", "self", ".", "add_event", "(", "names", ",", "send_event", "=", "send_event", ",", "event_factory", "=", "internal_event_factory", ")" ]
This is only here to ensure my constant hatred for Python 2's horrid variable argument support.
[ "This", "is", "only", "here", "to", "ensure", "my", "constant", "hatred", "for", "Python", "2", "s", "horrid", "variable", "argument", "support", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L154-L160
245,810
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch._attach_handler_events
def _attach_handler_events(self, handler, events=None): """ Search handler for methods named after events, attaching to event handlers as applicable. :param object handler: Handler instance :param list events: List of event names to look for. If not specified, will do all known event names. """ if not events: events = self for name in events: meth = getattr(handler, name, None) if meth: self.events[name] += meth
python
def _attach_handler_events(self, handler, events=None): """ Search handler for methods named after events, attaching to event handlers as applicable. :param object handler: Handler instance :param list events: List of event names to look for. If not specified, will do all known event names. """ if not events: events = self for name in events: meth = getattr(handler, name, None) if meth: self.events[name] += meth
[ "def", "_attach_handler_events", "(", "self", ",", "handler", ",", "events", "=", "None", ")", ":", "if", "not", "events", ":", "events", "=", "self", "for", "name", "in", "events", ":", "meth", "=", "getattr", "(", "handler", ",", "name", ",", "None", ")", "if", "meth", ":", "self", ".", "events", "[", "name", "]", "+=", "meth" ]
Search handler for methods named after events, attaching to event handlers as applicable. :param object handler: Handler instance :param list events: List of event names to look for. If not specified, will do all known event names.
[ "Search", "handler", "for", "methods", "named", "after", "events", "attaching", "to", "event", "handlers", "as", "applicable", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L204-L216
245,811
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch._remove
def _remove(self, handler, send_event=True): """ Remove handler instance and detach any methods bound to it from uninhibited. :param object handler: handler instance :return object: The handler you added is given back so this can be used as a decorator. """ for event in self: event.remove_handlers_bound_to_instance(handler) self.handlers.remove(handler) if send_event: self.on_handler_remove(handler)
python
def _remove(self, handler, send_event=True): """ Remove handler instance and detach any methods bound to it from uninhibited. :param object handler: handler instance :return object: The handler you added is given back so this can be used as a decorator. """ for event in self: event.remove_handlers_bound_to_instance(handler) self.handlers.remove(handler) if send_event: self.on_handler_remove(handler)
[ "def", "_remove", "(", "self", ",", "handler", ",", "send_event", "=", "True", ")", ":", "for", "event", "in", "self", ":", "event", ".", "remove_handlers_bound_to_instance", "(", "handler", ")", "self", ".", "handlers", ".", "remove", "(", "handler", ")", "if", "send_event", ":", "self", ".", "on_handler_remove", "(", "handler", ")" ]
Remove handler instance and detach any methods bound to it from uninhibited. :param object handler: handler instance :return object: The handler you added is given back so this can be used as a decorator.
[ "Remove", "handler", "instance", "and", "detach", "any", "methods", "bound", "to", "it", "from", "uninhibited", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L254-L265
245,812
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch.fire
def fire(self, event, *args, **kwargs): """ Fire event. call event's handlers using given arguments, return a list of results. :param str name: Event name :param tuple args: positional arguments to call each handler with :param dict kwargs: keyword arguments to call each handler with :return list: a list of tuples of handler, return value """ if not self._maybe_create_on_fire(event): return return self[event].fire(*args, **kwargs)
python
def fire(self, event, *args, **kwargs):
    """
    Fire an event: call its handlers with the given arguments.

    :param str event: Event name
    :param tuple args: positional arguments to call each handler with
    :param dict kwargs: keyword arguments to call each handler with
    :return list: a list of (handler, return value) tuples, or None when
        the event does not exist and was not created.
    """
    if self._maybe_create_on_fire(event):
        return self[event].fire(*args, **kwargs)
    return None
[ "def", "fire", "(", "self", ",", "event", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "_maybe_create_on_fire", "(", "event", ")", ":", "return", "return", "self", "[", "event", "]", ".", "fire", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Fire event. call event's handlers using given arguments, return a list of results. :param str name: Event name :param tuple args: positional arguments to call each handler with :param dict kwargs: keyword arguments to call each handler with :return list: a list of tuples of handler, return value
[ "Fire", "event", ".", "call", "event", "s", "handlers", "using", "given", "arguments", "return", "a", "list", "of", "results", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L296-L307
245,813
akatrevorjay/uninhibited
uninhibited/dispatch.py
Dispatch.ifire
def ifire(self, event, *args, **kwargs): """ Iteratively fire event, returning generator. Calls each handler using given arguments, upon iteration, yielding each result. :param str name: Event name :param tuple args: positional arguments to call each handler with :param dict kwargs: keyword arguments to call each handler with :return generator: a generator yielding a tuple of handler, return value """ if not self._maybe_create_on_fire(event): return # Wrap the generator per item to force that this method be a generator # Python 3.x of course has yield from, which would be great here. # for x in self[event].ifire(*args, **kwargs) # yield x return self[event].ifire(*args, **kwargs)
python
def ifire(self, event, *args, **kwargs):
    """
    Iteratively fire an event: return a generator that, on iteration,
    calls each handler with the given arguments and yields each result.

    :param str event: Event name
    :param tuple args: positional arguments to call each handler with
    :param dict kwargs: keyword arguments to call each handler with
    :return generator: yields (handler, return value) tuples, or None
        when the event does not exist and was not created.
    """
    exists = self._maybe_create_on_fire(event)
    if not exists:
        return None
    # Hand back the event's own generator directly; wrapping it in a
    # yield loop here would only matter if this method itself had to be
    # a generator function (Python 3 would use ``yield from``).
    return self[event].ifire(*args, **kwargs)
[ "def", "ifire", "(", "self", ",", "event", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "_maybe_create_on_fire", "(", "event", ")", ":", "return", "# Wrap the generator per item to force that this method be a generator", "# Python 3.x of course has yield from, which would be great here.", "# for x in self[event].ifire(*args, **kwargs)", "# yield x", "return", "self", "[", "event", "]", ".", "ifire", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Iteratively fire event, returning generator. Calls each handler using given arguments, upon iteration, yielding each result. :param str name: Event name :param tuple args: positional arguments to call each handler with :param dict kwargs: keyword arguments to call each handler with :return generator: a generator yielding a tuple of handler, return value
[ "Iteratively", "fire", "event", "returning", "generator", ".", "Calls", "each", "handler", "using", "given", "arguments", "upon", "iteration", "yielding", "each", "result", "." ]
f23079fe61cf831fa274d3c60bda8076c571d3f1
https://github.com/akatrevorjay/uninhibited/blob/f23079fe61cf831fa274d3c60bda8076c571d3f1/uninhibited/dispatch.py#L311-L328
245,814
dansackett/django-toolset
django_toolset/templatetags/custom_tags.py
active
def active(context, pattern_or_urlname, class_name='active', *args, **kwargs): """Based on a URL Pattern or name, determine if it is the current page. This is useful if you're creating a navigation component and want to give the active URL a specific class for UI purposes. It will accept a named URL or a regex pattern. If you have a URL which accepts args or kwargs then you may pass them into the tag and they will be picked up for matching as well. Usage: {% load custom_tags %} <li class="nav-home {% active 'url-name' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active '^/regex/' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active 'url-name' class_name='current' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active 'url-name' username=user.username %}"> <a href="#">Home</a> </li> """ request = context.dicts[1].get('request') try: pattern = '^%s$' % reverse(pattern_or_urlname, args=args, kwargs=kwargs) except NoReverseMatch: pattern = pattern_or_urlname if request and re.search(pattern, request.path): return class_name return ''
python
def active(context, pattern_or_urlname, class_name='active', *args, **kwargs):
    """
    Return ``class_name`` when the current request path matches the given
    URL name or regex pattern, else the empty string.

    Useful for navigation components that mark the active link. Accepts a
    named URL (with optional args/kwargs forwarded to ``reverse``) or a
    raw regex pattern.

    Usage::

        {% load custom_tags %}
        <li class="nav-home {% active 'url-name' %}">...</li>
        <li class="nav-home {% active '^/regex/' %}">...</li>
        <li class="nav-home {% active 'url-name' class_name='current' %}">...</li>
        <li class="nav-home {% active 'url-name' username=user.username %}">...</li>
    """
    request = context.dicts[1].get('request')
    try:
        resolved = reverse(pattern_or_urlname, args=args, kwargs=kwargs)
        pattern = '^%s$' % resolved
    except NoReverseMatch:
        # Not a URL name: treat the argument as a regex pattern itself.
        pattern = pattern_or_urlname
    matched = bool(request) and re.search(pattern, request.path)
    return class_name if matched else ''
[ "def", "active", "(", "context", ",", "pattern_or_urlname", ",", "class_name", "=", "'active'", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "request", "=", "context", ".", "dicts", "[", "1", "]", ".", "get", "(", "'request'", ")", "try", ":", "pattern", "=", "'^%s$'", "%", "reverse", "(", "pattern_or_urlname", ",", "args", "=", "args", ",", "kwargs", "=", "kwargs", ")", "except", "NoReverseMatch", ":", "pattern", "=", "pattern_or_urlname", "if", "request", "and", "re", ".", "search", "(", "pattern", ",", "request", ".", "path", ")", ":", "return", "class_name", "return", "''" ]
Based on a URL Pattern or name, determine if it is the current page. This is useful if you're creating a navigation component and want to give the active URL a specific class for UI purposes. It will accept a named URL or a regex pattern. If you have a URL which accepts args or kwargs then you may pass them into the tag and they will be picked up for matching as well. Usage: {% load custom_tags %} <li class="nav-home {% active 'url-name' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active '^/regex/' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active 'url-name' class_name='current' %}"> <a href="#">Home</a> </li> OR <li class="nav-home {% active 'url-name' username=user.username %}"> <a href="#">Home</a> </li>
[ "Based", "on", "a", "URL", "Pattern", "or", "name", "determine", "if", "it", "is", "the", "current", "page", "." ]
a28cc19e32cf41130e848c268d26c1858a7cf26a
https://github.com/dansackett/django-toolset/blob/a28cc19e32cf41130e848c268d26c1858a7cf26a/django_toolset/templatetags/custom_tags.py#L10-L57
245,815
ravenac95/subwrap
subwrap.py
run
def run(sub_command, exit_handle=None, **options): """Run a command""" command = Command(sub_command, exit_handle) return command.run(**options)
python
def run(sub_command, exit_handle=None, **options):
    """Build a ``Command`` for *sub_command* and run it with *options*."""
    return Command(sub_command, exit_handle).run(**options)
[ "def", "run", "(", "sub_command", ",", "exit_handle", "=", "None", ",", "*", "*", "options", ")", ":", "command", "=", "Command", "(", "sub_command", ",", "exit_handle", ")", "return", "command", ".", "run", "(", "*", "*", "options", ")" ]
Run a command
[ "Run", "a", "command" ]
ba0d4898e537881a217ade6ce2a1cee45407aa62
https://github.com/ravenac95/subwrap/blob/ba0d4898e537881a217ade6ce2a1cee45407aa62/subwrap.py#L49-L52
245,816
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
__trim_extensions_dot
def __trim_extensions_dot(exts): """trim leading dots from extensions and drop any empty strings.""" if exts is None: return None res = [] for i in range(0, len(exts)): if exts[i] == "": continue res.append(__trim_extension_dot(exts[i])) return res
python
def __trim_extensions_dot(exts): """trim leading dots from extensions and drop any empty strings.""" if exts is None: return None res = [] for i in range(0, len(exts)): if exts[i] == "": continue res.append(__trim_extension_dot(exts[i])) return res
[ "def", "__trim_extensions_dot", "(", "exts", ")", ":", "if", "exts", "is", "None", ":", "return", "None", "res", "=", "[", "]", "for", "i", "in", "range", "(", "0", ",", "len", "(", "exts", ")", ")", ":", "if", "exts", "[", "i", "]", "==", "\"\"", ":", "continue", "res", ".", "append", "(", "__trim_extension_dot", "(", "exts", "[", "i", "]", ")", ")", "return", "res" ]
trim leading dots from extensions and drop any empty strings.
[ "trim", "leading", "dots", "from", "extensions", "and", "drop", "any", "empty", "strings", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L63-L72
245,817
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
load_just_in_time_genome_alignment
def load_just_in_time_genome_alignment(path, ref_spec, extensions=None, index_exts=None, fail_no_index=True, verbose=False): """Load a just-in-time genome alignment from a directory.""" if index_exts is None and fail_no_index: raise ValueError("Failure on no index specified for loading genome " + "alignment, but no index extensions specified") extensions = __trim_extensions_dot(extensions) index_exts = __trim_extensions_dot(index_exts) partial_chrom_files = {} whole_chrom_files = {} for fn in os.listdir(path): pth = os.path.join(path, fn) if os.path.isfile(pth): base, ext = os.path.splitext(pth) ext = __trim_extension_dot(ext) if extensions is None or ext in extensions: idx_path = __find_index(pth, index_exts) if idx_path is None and fail_no_index: raise PyokitIOError("No index file for " + fn) chrom, start, end = __split_genomic_interval_filename(fn) assert((start is None and end is None) or (start is not None and end is not None)) if start is None: if chrom in whole_chrom_files: raise PyokitIOError("multiple files for chrom " + chrom) whole_chrom_files[chrom] = (pth, idx_path) else: k = (chrom, start, end) if k in partial_chrom_files: other_pth = partial_chrom_files[k][0] raise PyokitIOError("multiple files for " + str(k) + " --> " + pth + " and " + other_pth) partial_chrom_files[k] = (pth, idx_path) def factory(k): pth, idx = k return build_genome_alignment_from_file(pth, ref_spec, idx, verbose) return JustInTimeGenomeAlignment(whole_chrom_files, partial_chrom_files, factory)
python
def load_just_in_time_genome_alignment(path, ref_spec, extensions=None, index_exts=None, fail_no_index=True, verbose=False): """Load a just-in-time genome alignment from a directory.""" if index_exts is None and fail_no_index: raise ValueError("Failure on no index specified for loading genome " + "alignment, but no index extensions specified") extensions = __trim_extensions_dot(extensions) index_exts = __trim_extensions_dot(index_exts) partial_chrom_files = {} whole_chrom_files = {} for fn in os.listdir(path): pth = os.path.join(path, fn) if os.path.isfile(pth): base, ext = os.path.splitext(pth) ext = __trim_extension_dot(ext) if extensions is None or ext in extensions: idx_path = __find_index(pth, index_exts) if idx_path is None and fail_no_index: raise PyokitIOError("No index file for " + fn) chrom, start, end = __split_genomic_interval_filename(fn) assert((start is None and end is None) or (start is not None and end is not None)) if start is None: if chrom in whole_chrom_files: raise PyokitIOError("multiple files for chrom " + chrom) whole_chrom_files[chrom] = (pth, idx_path) else: k = (chrom, start, end) if k in partial_chrom_files: other_pth = partial_chrom_files[k][0] raise PyokitIOError("multiple files for " + str(k) + " --> " + pth + " and " + other_pth) partial_chrom_files[k] = (pth, idx_path) def factory(k): pth, idx = k return build_genome_alignment_from_file(pth, ref_spec, idx, verbose) return JustInTimeGenomeAlignment(whole_chrom_files, partial_chrom_files, factory)
[ "def", "load_just_in_time_genome_alignment", "(", "path", ",", "ref_spec", ",", "extensions", "=", "None", ",", "index_exts", "=", "None", ",", "fail_no_index", "=", "True", ",", "verbose", "=", "False", ")", ":", "if", "index_exts", "is", "None", "and", "fail_no_index", ":", "raise", "ValueError", "(", "\"Failure on no index specified for loading genome \"", "+", "\"alignment, but no index extensions specified\"", ")", "extensions", "=", "__trim_extensions_dot", "(", "extensions", ")", "index_exts", "=", "__trim_extensions_dot", "(", "index_exts", ")", "partial_chrom_files", "=", "{", "}", "whole_chrom_files", "=", "{", "}", "for", "fn", "in", "os", ".", "listdir", "(", "path", ")", ":", "pth", "=", "os", ".", "path", ".", "join", "(", "path", ",", "fn", ")", "if", "os", ".", "path", ".", "isfile", "(", "pth", ")", ":", "base", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "pth", ")", "ext", "=", "__trim_extension_dot", "(", "ext", ")", "if", "extensions", "is", "None", "or", "ext", "in", "extensions", ":", "idx_path", "=", "__find_index", "(", "pth", ",", "index_exts", ")", "if", "idx_path", "is", "None", "and", "fail_no_index", ":", "raise", "PyokitIOError", "(", "\"No index file for \"", "+", "fn", ")", "chrom", ",", "start", ",", "end", "=", "__split_genomic_interval_filename", "(", "fn", ")", "assert", "(", "(", "start", "is", "None", "and", "end", "is", "None", ")", "or", "(", "start", "is", "not", "None", "and", "end", "is", "not", "None", ")", ")", "if", "start", "is", "None", ":", "if", "chrom", "in", "whole_chrom_files", ":", "raise", "PyokitIOError", "(", "\"multiple files for chrom \"", "+", "chrom", ")", "whole_chrom_files", "[", "chrom", "]", "=", "(", "pth", ",", "idx_path", ")", "else", ":", "k", "=", "(", "chrom", ",", "start", ",", "end", ")", "if", "k", "in", "partial_chrom_files", ":", "other_pth", "=", "partial_chrom_files", "[", "k", "]", "[", "0", "]", "raise", "PyokitIOError", "(", "\"multiple files for \"", "+", "str", "(", 
"k", ")", "+", "\" --> \"", "+", "pth", "+", "\" and \"", "+", "other_pth", ")", "partial_chrom_files", "[", "k", "]", "=", "(", "pth", ",", "idx_path", ")", "def", "factory", "(", "k", ")", ":", "pth", ",", "idx", "=", "k", "return", "build_genome_alignment_from_file", "(", "pth", ",", "ref_spec", ",", "idx", ",", "verbose", ")", "return", "JustInTimeGenomeAlignment", "(", "whole_chrom_files", ",", "partial_chrom_files", ",", "factory", ")" ]
Load a just-in-time genome alignment from a directory.
[ "Load", "a", "just", "-", "in", "-", "time", "genome", "alignment", "from", "a", "directory", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L99-L141
245,818
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
__find_index
def __find_index(alig_file_pth, idx_extensions): """ Find an index file for a genome alignment file in the same directory. :param alig_file_path: path to the alignment file. :param idx_extensions: check for index files with these extensions :return: path to first index file that matches the name of the alignment file and has one of the specified extensions. """ if idx_extensions is None: return None base, _ = os.path.splitext(alig_file_pth) for idx_ext in idx_extensions: candidate = base + os.extsep + idx_ext if os.path.isfile(candidate): return candidate return None
python
def __find_index(alig_file_pth, idx_extensions): """ Find an index file for a genome alignment file in the same directory. :param alig_file_path: path to the alignment file. :param idx_extensions: check for index files with these extensions :return: path to first index file that matches the name of the alignment file and has one of the specified extensions. """ if idx_extensions is None: return None base, _ = os.path.splitext(alig_file_pth) for idx_ext in idx_extensions: candidate = base + os.extsep + idx_ext if os.path.isfile(candidate): return candidate return None
[ "def", "__find_index", "(", "alig_file_pth", ",", "idx_extensions", ")", ":", "if", "idx_extensions", "is", "None", ":", "return", "None", "base", ",", "_", "=", "os", ".", "path", ".", "splitext", "(", "alig_file_pth", ")", "for", "idx_ext", "in", "idx_extensions", ":", "candidate", "=", "base", "+", "os", ".", "extsep", "+", "idx_ext", "if", "os", ".", "path", ".", "isfile", "(", "candidate", ")", ":", "return", "candidate", "return", "None" ]
Find an index file for a genome alignment file in the same directory. :param alig_file_path: path to the alignment file. :param idx_extensions: check for index files with these extensions :return: path to first index file that matches the name of the alignment file and has one of the specified extensions.
[ "Find", "an", "index", "file", "for", "a", "genome", "alignment", "file", "in", "the", "same", "directory", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L148-L164
245,819
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
build_genome_alignment_from_directory
def build_genome_alignment_from_directory(d_name, ref_spec, extensions=None, index_exts=None, fail_no_index=False): """ build a genome aligment by loading all files in a directory. Fiel without indexes are loaded immediately; those with indexes are loaded on-demand. Not recursive (i.e. subdirectories are not parsed). :param d_name: directory to load from. :param ref_spec: which species in the alignemnt files is the reference? :param extensions: list or set of acceptable extensions; treat any files with these extensions as part of the alignment. If None, treat any file which has an extension that is NOT in index_extensions as part of the alignment. :param index_exts: treat any files with these extensions as index files. :param fail_no_index: fail if index extensions are provided and an alignment file has no index file. """ if index_exts is None and fail_no_index: raise ValueError("Failure on no index specified for loading genome " + "alignment, but no index extensions specified") blocks = [] for fn in os.listdir(d_name): pth = os.path.join(d_name, fn) if os.path.isfile(pth): _, ext = os.path.splitext(pth) if extensions is None or ext in extensions: idx_path = __find_index(pth, index_exts) if idx_path is None and fail_no_index: raise PyokitIOError("No index file for " + fn) for b in genome_alignment_iterator(pth, ref_spec, idx_path): blocks.append(b) return GenomeAlignment(blocks)
python
def build_genome_alignment_from_directory(d_name, ref_spec, extensions=None, index_exts=None, fail_no_index=False): """ build a genome aligment by loading all files in a directory. Fiel without indexes are loaded immediately; those with indexes are loaded on-demand. Not recursive (i.e. subdirectories are not parsed). :param d_name: directory to load from. :param ref_spec: which species in the alignemnt files is the reference? :param extensions: list or set of acceptable extensions; treat any files with these extensions as part of the alignment. If None, treat any file which has an extension that is NOT in index_extensions as part of the alignment. :param index_exts: treat any files with these extensions as index files. :param fail_no_index: fail if index extensions are provided and an alignment file has no index file. """ if index_exts is None and fail_no_index: raise ValueError("Failure on no index specified for loading genome " + "alignment, but no index extensions specified") blocks = [] for fn in os.listdir(d_name): pth = os.path.join(d_name, fn) if os.path.isfile(pth): _, ext = os.path.splitext(pth) if extensions is None or ext in extensions: idx_path = __find_index(pth, index_exts) if idx_path is None and fail_no_index: raise PyokitIOError("No index file for " + fn) for b in genome_alignment_iterator(pth, ref_spec, idx_path): blocks.append(b) return GenomeAlignment(blocks)
[ "def", "build_genome_alignment_from_directory", "(", "d_name", ",", "ref_spec", ",", "extensions", "=", "None", ",", "index_exts", "=", "None", ",", "fail_no_index", "=", "False", ")", ":", "if", "index_exts", "is", "None", "and", "fail_no_index", ":", "raise", "ValueError", "(", "\"Failure on no index specified for loading genome \"", "+", "\"alignment, but no index extensions specified\"", ")", "blocks", "=", "[", "]", "for", "fn", "in", "os", ".", "listdir", "(", "d_name", ")", ":", "pth", "=", "os", ".", "path", ".", "join", "(", "d_name", ",", "fn", ")", "if", "os", ".", "path", ".", "isfile", "(", "pth", ")", ":", "_", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "pth", ")", "if", "extensions", "is", "None", "or", "ext", "in", "extensions", ":", "idx_path", "=", "__find_index", "(", "pth", ",", "index_exts", ")", "if", "idx_path", "is", "None", "and", "fail_no_index", ":", "raise", "PyokitIOError", "(", "\"No index file for \"", "+", "fn", ")", "for", "b", "in", "genome_alignment_iterator", "(", "pth", ",", "ref_spec", ",", "idx_path", ")", ":", "blocks", ".", "append", "(", "b", ")", "return", "GenomeAlignment", "(", "blocks", ")" ]
build a genome aligment by loading all files in a directory. Fiel without indexes are loaded immediately; those with indexes are loaded on-demand. Not recursive (i.e. subdirectories are not parsed). :param d_name: directory to load from. :param ref_spec: which species in the alignemnt files is the reference? :param extensions: list or set of acceptable extensions; treat any files with these extensions as part of the alignment. If None, treat any file which has an extension that is NOT in index_extensions as part of the alignment. :param index_exts: treat any files with these extensions as index files. :param fail_no_index: fail if index extensions are provided and an alignment file has no index file.
[ "build", "a", "genome", "aligment", "by", "loading", "all", "files", "in", "a", "directory", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L167-L201
245,820
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
build_genome_alignment_from_file
def build_genome_alignment_from_file(ga_path, ref_spec, idx_path=None, verbose=False): """ build a genome alignment by loading from a single MAF file. :param ga_path: the path to the file to load. :param ref_spec: which species in the MAF file is the reference? :param idx_path: if provided, use this index to generate a just-in-time genome alignment, instead of loading the file immediately. """ blocks = [] if (idx_path is not None): bound_iter = functools.partial(genome_alignment_iterator, reference_species=ref_spec) hash_func = JustInTimeGenomeAlignmentBlock.build_hash factory = IndexedFile(None, bound_iter, hash_func) factory.read_index(idx_path, ga_path, verbose=verbose) pind = None for k in factory: if verbose: if pind is None: total = len(factory) pind = ProgressIndicator(totalToDo=total, messagePrefix="completed", messageSuffix="building alignment blocks ") pind.done += 1 pind.showProgress() blocks.append(JustInTimeGenomeAlignmentBlock(factory, k)) else: for b in genome_alignment_iterator(ga_path, ref_spec, verbose=verbose): blocks.append(b) return GenomeAlignment(blocks, verbose)
python
def build_genome_alignment_from_file(ga_path, ref_spec, idx_path=None, verbose=False): """ build a genome alignment by loading from a single MAF file. :param ga_path: the path to the file to load. :param ref_spec: which species in the MAF file is the reference? :param idx_path: if provided, use this index to generate a just-in-time genome alignment, instead of loading the file immediately. """ blocks = [] if (idx_path is not None): bound_iter = functools.partial(genome_alignment_iterator, reference_species=ref_spec) hash_func = JustInTimeGenomeAlignmentBlock.build_hash factory = IndexedFile(None, bound_iter, hash_func) factory.read_index(idx_path, ga_path, verbose=verbose) pind = None for k in factory: if verbose: if pind is None: total = len(factory) pind = ProgressIndicator(totalToDo=total, messagePrefix="completed", messageSuffix="building alignment blocks ") pind.done += 1 pind.showProgress() blocks.append(JustInTimeGenomeAlignmentBlock(factory, k)) else: for b in genome_alignment_iterator(ga_path, ref_spec, verbose=verbose): blocks.append(b) return GenomeAlignment(blocks, verbose)
[ "def", "build_genome_alignment_from_file", "(", "ga_path", ",", "ref_spec", ",", "idx_path", "=", "None", ",", "verbose", "=", "False", ")", ":", "blocks", "=", "[", "]", "if", "(", "idx_path", "is", "not", "None", ")", ":", "bound_iter", "=", "functools", ".", "partial", "(", "genome_alignment_iterator", ",", "reference_species", "=", "ref_spec", ")", "hash_func", "=", "JustInTimeGenomeAlignmentBlock", ".", "build_hash", "factory", "=", "IndexedFile", "(", "None", ",", "bound_iter", ",", "hash_func", ")", "factory", ".", "read_index", "(", "idx_path", ",", "ga_path", ",", "verbose", "=", "verbose", ")", "pind", "=", "None", "for", "k", "in", "factory", ":", "if", "verbose", ":", "if", "pind", "is", "None", ":", "total", "=", "len", "(", "factory", ")", "pind", "=", "ProgressIndicator", "(", "totalToDo", "=", "total", ",", "messagePrefix", "=", "\"completed\"", ",", "messageSuffix", "=", "\"building alignment blocks \"", ")", "pind", ".", "done", "+=", "1", "pind", ".", "showProgress", "(", ")", "blocks", ".", "append", "(", "JustInTimeGenomeAlignmentBlock", "(", "factory", ",", "k", ")", ")", "else", ":", "for", "b", "in", "genome_alignment_iterator", "(", "ga_path", ",", "ref_spec", ",", "verbose", "=", "verbose", ")", ":", "blocks", ".", "append", "(", "b", ")", "return", "GenomeAlignment", "(", "blocks", ",", "verbose", ")" ]
build a genome alignment by loading from a single MAF file. :param ga_path: the path to the file to load. :param ref_spec: which species in the MAF file is the reference? :param idx_path: if provided, use this index to generate a just-in-time genome alignment, instead of loading the file immediately.
[ "build", "a", "genome", "alignment", "by", "loading", "from", "a", "single", "MAF", "file", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L204-L235
245,821
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
genome_alignment_iterator
def genome_alignment_iterator(fn, reference_species, index_friendly=False, verbose=False): """ build an iterator for an MAF file of genome alignment blocks. :param fn: filename or stream-like object to iterate over. :param reference_species: which species in the alignment should be treated as the reference? :param index_friendly: if True, buffering is disabled to support using the iterator to build an index. :return an iterator that yields GenomeAlignment objects """ kw_args = {"reference_species": reference_species} for e in maf.maf_iterator(fn, index_friendly=index_friendly, yield_class=GenomeAlignmentBlock, yield_kw_args=kw_args, verbose=verbose): yield e
python
def genome_alignment_iterator(fn, reference_species, index_friendly=False, verbose=False): """ build an iterator for an MAF file of genome alignment blocks. :param fn: filename or stream-like object to iterate over. :param reference_species: which species in the alignment should be treated as the reference? :param index_friendly: if True, buffering is disabled to support using the iterator to build an index. :return an iterator that yields GenomeAlignment objects """ kw_args = {"reference_species": reference_species} for e in maf.maf_iterator(fn, index_friendly=index_friendly, yield_class=GenomeAlignmentBlock, yield_kw_args=kw_args, verbose=verbose): yield e
[ "def", "genome_alignment_iterator", "(", "fn", ",", "reference_species", ",", "index_friendly", "=", "False", ",", "verbose", "=", "False", ")", ":", "kw_args", "=", "{", "\"reference_species\"", ":", "reference_species", "}", "for", "e", "in", "maf", ".", "maf_iterator", "(", "fn", ",", "index_friendly", "=", "index_friendly", ",", "yield_class", "=", "GenomeAlignmentBlock", ",", "yield_kw_args", "=", "kw_args", ",", "verbose", "=", "verbose", ")", ":", "yield", "e" ]
build an iterator for an MAF file of genome alignment blocks. :param fn: filename or stream-like object to iterate over. :param reference_species: which species in the alignment should be treated as the reference? :param index_friendly: if True, buffering is disabled to support using the iterator to build an index. :return an iterator that yields GenomeAlignment objects
[ "build", "an", "iterator", "for", "an", "MAF", "file", "of", "genome", "alignment", "blocks", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L242-L260
245,822
pjuren/pyokit
src/pyokit/io/genomeAlignment.py
_build_index
def _build_index(maf_strm, ref_spec): """Build an index for a MAF genome alig file and return StringIO of it.""" idx_strm = StringIO.StringIO() bound_iter = functools.partial(genome_alignment_iterator, reference_species=ref_spec) hash_func = JustInTimeGenomeAlignmentBlock.build_hash idx = IndexedFile(maf_strm, bound_iter, hash_func) idx.write_index(idx_strm) idx_strm.seek(0) # seek to the start return idx_strm
python
def _build_index(maf_strm, ref_spec): """Build an index for a MAF genome alig file and return StringIO of it.""" idx_strm = StringIO.StringIO() bound_iter = functools.partial(genome_alignment_iterator, reference_species=ref_spec) hash_func = JustInTimeGenomeAlignmentBlock.build_hash idx = IndexedFile(maf_strm, bound_iter, hash_func) idx.write_index(idx_strm) idx_strm.seek(0) # seek to the start return idx_strm
[ "def", "_build_index", "(", "maf_strm", ",", "ref_spec", ")", ":", "idx_strm", "=", "StringIO", ".", "StringIO", "(", ")", "bound_iter", "=", "functools", ".", "partial", "(", "genome_alignment_iterator", ",", "reference_species", "=", "ref_spec", ")", "hash_func", "=", "JustInTimeGenomeAlignmentBlock", ".", "build_hash", "idx", "=", "IndexedFile", "(", "maf_strm", ",", "bound_iter", ",", "hash_func", ")", "idx", ".", "write_index", "(", "idx_strm", ")", "idx_strm", ".", "seek", "(", "0", ")", "# seek to the start", "return", "idx_strm" ]
Build an index for a MAF genome alig file and return StringIO of it.
[ "Build", "an", "index", "for", "a", "MAF", "genome", "alig", "file", "and", "return", "StringIO", "of", "it", "." ]
fddae123b5d817daa39496183f19c000d9c3791f
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/genomeAlignment.py#L297-L306
245,823
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
ScanResults.update
def update(self, addr, raw_addr, name=None, rssi=None): """Updates the collection of results with a newly received scan response. Args: addr (str): Device hardware address in xx:xx:xx:xx:xx:xx format. raw_addr (bytearray): Device hardware address as raw bytes. name (str): Device name (if available) as ASCII text. May be None. rssi (float): Latest RSSI from the scan result for the device. May be 0. Returns: True if an existing device was updated, False if a new entry was created. """ if addr in self._devices: # logger.debug('UPDATE scan result: {} / {}'.format(addr, name)) self._devices[addr].update(name, rssi) return False else: self._devices[addr] = ScanResult(addr, raw_addr, name, rssi) logger.debug('Scan result: {} / {}'.format(addr, name)) return True
python
def update(self, addr, raw_addr, name=None, rssi=None): """Updates the collection of results with a newly received scan response. Args: addr (str): Device hardware address in xx:xx:xx:xx:xx:xx format. raw_addr (bytearray): Device hardware address as raw bytes. name (str): Device name (if available) as ASCII text. May be None. rssi (float): Latest RSSI from the scan result for the device. May be 0. Returns: True if an existing device was updated, False if a new entry was created. """ if addr in self._devices: # logger.debug('UPDATE scan result: {} / {}'.format(addr, name)) self._devices[addr].update(name, rssi) return False else: self._devices[addr] = ScanResult(addr, raw_addr, name, rssi) logger.debug('Scan result: {} / {}'.format(addr, name)) return True
[ "def", "update", "(", "self", ",", "addr", ",", "raw_addr", ",", "name", "=", "None", ",", "rssi", "=", "None", ")", ":", "if", "addr", "in", "self", ".", "_devices", ":", "# logger.debug('UPDATE scan result: {} / {}'.format(addr, name))", "self", ".", "_devices", "[", "addr", "]", ".", "update", "(", "name", ",", "rssi", ")", "return", "False", "else", ":", "self", ".", "_devices", "[", "addr", "]", "=", "ScanResult", "(", "addr", ",", "raw_addr", ",", "name", ",", "rssi", ")", "logger", ".", "debug", "(", "'Scan result: {} / {}'", ".", "format", "(", "addr", ",", "name", ")", ")", "return", "True" ]
Updates the collection of results with a newly received scan response. Args: addr (str): Device hardware address in xx:xx:xx:xx:xx:xx format. raw_addr (bytearray): Device hardware address as raw bytes. name (str): Device name (if available) as ASCII text. May be None. rssi (float): Latest RSSI from the scan result for the device. May be 0. Returns: True if an existing device was updated, False if a new entry was created.
[ "Updates", "the", "collection", "of", "results", "with", "a", "newly", "received", "scan", "response", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L107-L126
245,824
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_calibration
def set_calibration(self, enabled, imus): """Set calibration state for attached IMUs. Args: enabled (bool): True to apply calibration to IMU data (if available). False to output uncalibrated data. imus (list): indicates which IMUs the calibration state should be set on. Empty list or [0, 1, 2, 3, 4] will apply to all IMUs, [0, 1] only to first 2 IMUs, etc. """ if len(imus) == 0: imus = list(range(MAX_IMUS)) for i in imus: if i < 0 or i >= MAX_IMUS: logger.warn('Invalid IMU index {} in set_calibration'.format(i)) continue self.imus[i]._use_calibration = enabled
python
def set_calibration(self, enabled, imus): """Set calibration state for attached IMUs. Args: enabled (bool): True to apply calibration to IMU data (if available). False to output uncalibrated data. imus (list): indicates which IMUs the calibration state should be set on. Empty list or [0, 1, 2, 3, 4] will apply to all IMUs, [0, 1] only to first 2 IMUs, etc. """ if len(imus) == 0: imus = list(range(MAX_IMUS)) for i in imus: if i < 0 or i >= MAX_IMUS: logger.warn('Invalid IMU index {} in set_calibration'.format(i)) continue self.imus[i]._use_calibration = enabled
[ "def", "set_calibration", "(", "self", ",", "enabled", ",", "imus", ")", ":", "if", "len", "(", "imus", ")", "==", "0", ":", "imus", "=", "list", "(", "range", "(", "MAX_IMUS", ")", ")", "for", "i", "in", "imus", ":", "if", "i", "<", "0", "or", "i", ">=", "MAX_IMUS", ":", "logger", ".", "warn", "(", "'Invalid IMU index {} in set_calibration'", ".", "format", "(", "i", ")", ")", "continue", "self", ".", "imus", "[", "i", "]", ".", "_use_calibration", "=", "enabled" ]
Set calibration state for attached IMUs. Args: enabled (bool): True to apply calibration to IMU data (if available). False to output uncalibrated data. imus (list): indicates which IMUs the calibration state should be set on. Empty list or [0, 1, 2, 3, 4] will apply to all IMUs, [0, 1] only to first 2 IMUs, etc.
[ "Set", "calibration", "state", "for", "attached", "IMUs", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L240-L257
245,825
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.disconnect
def disconnect(self): """Disconnect the dongle from this SK8. Simply closes the active BLE connection to the device represented by the current instance. Returns: bool. True if connection was closed, False if not (e.g. if already closed). """ result = False logger.debug('SK8.disconnect({})'.format(self.conn_handle)) if self.conn_handle >= 0: logger.debug('Calling dongle disconnect') result = self.dongle._disconnect(self.conn_handle) self.conn_handle = -1 self.packets = 0 return result
python
def disconnect(self): """Disconnect the dongle from this SK8. Simply closes the active BLE connection to the device represented by the current instance. Returns: bool. True if connection was closed, False if not (e.g. if already closed). """ result = False logger.debug('SK8.disconnect({})'.format(self.conn_handle)) if self.conn_handle >= 0: logger.debug('Calling dongle disconnect') result = self.dongle._disconnect(self.conn_handle) self.conn_handle = -1 self.packets = 0 return result
[ "def", "disconnect", "(", "self", ")", ":", "result", "=", "False", "logger", ".", "debug", "(", "'SK8.disconnect({})'", ".", "format", "(", "self", ".", "conn_handle", ")", ")", "if", "self", ".", "conn_handle", ">=", "0", ":", "logger", ".", "debug", "(", "'Calling dongle disconnect'", ")", "result", "=", "self", ".", "dongle", ".", "_disconnect", "(", "self", ".", "conn_handle", ")", "self", ".", "conn_handle", "=", "-", "1", "self", ".", "packets", "=", "0", "return", "result" ]
Disconnect the dongle from this SK8. Simply closes the active BLE connection to the device represented by the current instance. Returns: bool. True if connection was closed, False if not (e.g. if already closed).
[ "Disconnect", "the", "dongle", "from", "this", "SK8", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L300-L316
245,826
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_extana_callback
def set_extana_callback(self, callback, data=None): """Register a callback for incoming data packets from the SK8-ExtAna board. This method allows you to pass in a callable which will be called on receipt of each packet sent from the SK8-ExtAna board. Set to `None` to disable it again. Args: callback: a callable with the following signature: (ana1, ana2, temp, seq, timestamp, data) where: ana1, ana2 = current values of the two analogue inputs temp = temperature sensor reading seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback """ self.extana_callback = callback self.extana_callback_data = data
python
def set_extana_callback(self, callback, data=None): """Register a callback for incoming data packets from the SK8-ExtAna board. This method allows you to pass in a callable which will be called on receipt of each packet sent from the SK8-ExtAna board. Set to `None` to disable it again. Args: callback: a callable with the following signature: (ana1, ana2, temp, seq, timestamp, data) where: ana1, ana2 = current values of the two analogue inputs temp = temperature sensor reading seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback """ self.extana_callback = callback self.extana_callback_data = data
[ "def", "set_extana_callback", "(", "self", ",", "callback", ",", "data", "=", "None", ")", ":", "self", ".", "extana_callback", "=", "callback", "self", ".", "extana_callback_data", "=", "data" ]
Register a callback for incoming data packets from the SK8-ExtAna board. This method allows you to pass in a callable which will be called on receipt of each packet sent from the SK8-ExtAna board. Set to `None` to disable it again. Args: callback: a callable with the following signature: (ana1, ana2, temp, seq, timestamp, data) where: ana1, ana2 = current values of the two analogue inputs temp = temperature sensor reading seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback
[ "Register", "a", "callback", "for", "incoming", "data", "packets", "from", "the", "SK8", "-", "ExtAna", "board", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L318-L338
245,827
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.enable_extana_streaming
def enable_extana_streaming(self, include_imu=False, enabled_sensors=SENSOR_ALL): """Configures and enables sensor data streaming from the SK8-ExtAna device. By default this will cause the SK8 to only stream data from the analog sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will also send data from the internal IMU in the SK8. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data normally, you must disable SK8-ExtAna streaming first. Args: include_imu (bool): If False, only SK8-ExtAna packets will be streamed. If True, the device will also stream data from the SK8's internal IMU. enabled_sensors (int): If `include_imu` is True, this can be used to select which IMU sensors will be active. Returns: bool. True if successful, False if an error occurred. """ if not self.dongle._enable_extana_streaming(self, include_imu, enabled_sensors): logger.warn('Failed to enable SK8-ExtAna streaming!') return False # have to add IMU #0 to enabled_imus if include_imu is True if include_imu: self.enabled_imus = [0] return True
python
def enable_extana_streaming(self, include_imu=False, enabled_sensors=SENSOR_ALL): """Configures and enables sensor data streaming from the SK8-ExtAna device. By default this will cause the SK8 to only stream data from the analog sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will also send data from the internal IMU in the SK8. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data normally, you must disable SK8-ExtAna streaming first. Args: include_imu (bool): If False, only SK8-ExtAna packets will be streamed. If True, the device will also stream data from the SK8's internal IMU. enabled_sensors (int): If `include_imu` is True, this can be used to select which IMU sensors will be active. Returns: bool. True if successful, False if an error occurred. """ if not self.dongle._enable_extana_streaming(self, include_imu, enabled_sensors): logger.warn('Failed to enable SK8-ExtAna streaming!') return False # have to add IMU #0 to enabled_imus if include_imu is True if include_imu: self.enabled_imus = [0] return True
[ "def", "enable_extana_streaming", "(", "self", ",", "include_imu", "=", "False", ",", "enabled_sensors", "=", "SENSOR_ALL", ")", ":", "if", "not", "self", ".", "dongle", ".", "_enable_extana_streaming", "(", "self", ",", "include_imu", ",", "enabled_sensors", ")", ":", "logger", ".", "warn", "(", "'Failed to enable SK8-ExtAna streaming!'", ")", "return", "False", "# have to add IMU #0 to enabled_imus if include_imu is True", "if", "include_imu", ":", "self", ".", "enabled_imus", "=", "[", "0", "]", "return", "True" ]
Configures and enables sensor data streaming from the SK8-ExtAna device. By default this will cause the SK8 to only stream data from the analog sensors on the SK8-ExtAna, but if `include_imu` is set to True, it will also send data from the internal IMU in the SK8. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data normally, you must disable SK8-ExtAna streaming first. Args: include_imu (bool): If False, only SK8-ExtAna packets will be streamed. If True, the device will also stream data from the SK8's internal IMU. enabled_sensors (int): If `include_imu` is True, this can be used to select which IMU sensors will be active. Returns: bool. True if successful, False if an error occurred.
[ "Configures", "and", "enables", "sensor", "data", "streaming", "from", "the", "SK8", "-", "ExtAna", "device", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L340-L367
245,828
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_extana_led
def set_extana_led(self, r, g, b, check_state=True): """Update the colour of the RGB LED on the SK8-ExtAna board. Args: r (int): red channel, 0-255 g (int): green channel, 0-255 b (int): blue channel, 0-255 check_state (bool): if True (default) and the locally cached LED state matches the given (r, g, b) triplet, pysk8 will NOT send any LED update command to the SK8. If you want to force the command to be sent even if the local state matches the new colour, set this to False. Returns: True on success, False if an error occurred. """ r, g, b = map(int, [r, g, b]) if min([r, g, b]) < LED_MIN or max([r, g, b]) > LED_MAX: logger.warn('RGB channel values must be {}-{}'.format(LED_MIN, LED_MAX)) return False if check_state and (r, g, b) == self.led_state: return True # internally range is 0-3000 ir, ig, ib = map(lambda x: int(x * (INT_LED_MAX / LED_MAX)), [r, g, b]) val = struct.pack('<HHH', ir, ig, ib) extana_led = self.get_characteristic_handle_from_uuid(UUID_EXTANA_LED) if extana_led is None: logger.warn('Failed to find handle for ExtAna LED') return None if not self.dongle._write_attribute(self.conn_handle, extana_led, val): return False # update cached LED state if successful self.led_state = (r, g, b) return True
python
def set_extana_led(self, r, g, b, check_state=True): """Update the colour of the RGB LED on the SK8-ExtAna board. Args: r (int): red channel, 0-255 g (int): green channel, 0-255 b (int): blue channel, 0-255 check_state (bool): if True (default) and the locally cached LED state matches the given (r, g, b) triplet, pysk8 will NOT send any LED update command to the SK8. If you want to force the command to be sent even if the local state matches the new colour, set this to False. Returns: True on success, False if an error occurred. """ r, g, b = map(int, [r, g, b]) if min([r, g, b]) < LED_MIN or max([r, g, b]) > LED_MAX: logger.warn('RGB channel values must be {}-{}'.format(LED_MIN, LED_MAX)) return False if check_state and (r, g, b) == self.led_state: return True # internally range is 0-3000 ir, ig, ib = map(lambda x: int(x * (INT_LED_MAX / LED_MAX)), [r, g, b]) val = struct.pack('<HHH', ir, ig, ib) extana_led = self.get_characteristic_handle_from_uuid(UUID_EXTANA_LED) if extana_led is None: logger.warn('Failed to find handle for ExtAna LED') return None if not self.dongle._write_attribute(self.conn_handle, extana_led, val): return False # update cached LED state if successful self.led_state = (r, g, b) return True
[ "def", "set_extana_led", "(", "self", ",", "r", ",", "g", ",", "b", ",", "check_state", "=", "True", ")", ":", "r", ",", "g", ",", "b", "=", "map", "(", "int", ",", "[", "r", ",", "g", ",", "b", "]", ")", "if", "min", "(", "[", "r", ",", "g", ",", "b", "]", ")", "<", "LED_MIN", "or", "max", "(", "[", "r", ",", "g", ",", "b", "]", ")", ">", "LED_MAX", ":", "logger", ".", "warn", "(", "'RGB channel values must be {}-{}'", ".", "format", "(", "LED_MIN", ",", "LED_MAX", ")", ")", "return", "False", "if", "check_state", "and", "(", "r", ",", "g", ",", "b", ")", "==", "self", ".", "led_state", ":", "return", "True", "# internally range is 0-3000", "ir", ",", "ig", ",", "ib", "=", "map", "(", "lambda", "x", ":", "int", "(", "x", "*", "(", "INT_LED_MAX", "/", "LED_MAX", ")", ")", ",", "[", "r", ",", "g", ",", "b", "]", ")", "val", "=", "struct", ".", "pack", "(", "'<HHH'", ",", "ir", ",", "ig", ",", "ib", ")", "extana_led", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_EXTANA_LED", ")", "if", "extana_led", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for ExtAna LED'", ")", "return", "None", "if", "not", "self", ".", "dongle", ".", "_write_attribute", "(", "self", ".", "conn_handle", ",", "extana_led", ",", "val", ")", ":", "return", "False", "# update cached LED state if successful", "self", ".", "led_state", "=", "(", "r", ",", "g", ",", "b", ")", "return", "True" ]
Update the colour of the RGB LED on the SK8-ExtAna board. Args: r (int): red channel, 0-255 g (int): green channel, 0-255 b (int): blue channel, 0-255 check_state (bool): if True (default) and the locally cached LED state matches the given (r, g, b) triplet, pysk8 will NOT send any LED update command to the SK8. If you want to force the command to be sent even if the local state matches the new colour, set this to False. Returns: True on success, False if an error occurred.
[ "Update", "the", "colour", "of", "the", "RGB", "LED", "on", "the", "SK8", "-", "ExtAna", "board", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L403-L442
245,829
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_imu_callback
def set_imu_callback(self, callback, data=None): """Register a callback for incoming IMU data packets. This method allows you to pass in a callbable which will be called on receipt of each IMU data packet sent by this SK8 device. Set to `None` to disable it again. Args: callback: a callable with the following signature: (acc, gyro, mag, imu_index, seq, timestamp, data) where: acc, gyro, mag = sensor data ([x,y,z] in each case) imu_index = originating IMU number (int, 0-4) seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback """ self.imu_callback = callback self.imu_callback_data = data
python
def set_imu_callback(self, callback, data=None): """Register a callback for incoming IMU data packets. This method allows you to pass in a callbable which will be called on receipt of each IMU data packet sent by this SK8 device. Set to `None` to disable it again. Args: callback: a callable with the following signature: (acc, gyro, mag, imu_index, seq, timestamp, data) where: acc, gyro, mag = sensor data ([x,y,z] in each case) imu_index = originating IMU number (int, 0-4) seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback """ self.imu_callback = callback self.imu_callback_data = data
[ "def", "set_imu_callback", "(", "self", ",", "callback", ",", "data", "=", "None", ")", ":", "self", ".", "imu_callback", "=", "callback", "self", ".", "imu_callback_data", "=", "data" ]
Register a callback for incoming IMU data packets. This method allows you to pass in a callbable which will be called on receipt of each IMU data packet sent by this SK8 device. Set to `None` to disable it again. Args: callback: a callable with the following signature: (acc, gyro, mag, imu_index, seq, timestamp, data) where: acc, gyro, mag = sensor data ([x,y,z] in each case) imu_index = originating IMU number (int, 0-4) seq = packet sequence number (int, 0-255) timestamp = value of time.time() when packet received data = value of `data` parameter passed to this method data: an optional arbitrary object that will be passed as a parameter to the callback
[ "Register", "a", "callback", "for", "incoming", "IMU", "data", "packets", ".", "This", "method", "allows", "you", "to", "pass", "in", "a", "callbable", "which", "will", "be", "called", "on", "receipt", "of", "each", "IMU", "data", "packet", "sent", "by", "this", "SK8", "device", ".", "Set", "to", "None", "to", "disable", "it", "again", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L444-L464
245,830
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.enable_imu_streaming
def enable_imu_streaming(self, enabled_imus, enabled_sensors=SENSOR_ALL): """Configures and enables IMU sensor data streaming. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data, you must disable SK8-ExtAna streaming first. Args: enabled_imus (list): a list of distinct ints in the range `0`-`4` inclusive identifying the IMU. `0` is the SK8 itself, and `1`-`4` are the subsidiary IMUs on the USB chain, starting from the end closest to the SK8. enabled_sensors (int): to save battery, you can choose to enable some or all of the sensors on each enabled IMU. By default, the accelerometer, magnetometer, and gyroscope are all enabled. Pass a bitwise OR of one or more of :const:`SENSOR_ACC`, :const:`SENSOR_MAG`, and :const:`SENSOR_GYRO` to gain finer control over the active sensors. Returns: bool. True if successful, False if an error occurred. """ imus_enabled = 0 for imu in enabled_imus: imus_enabled |= (1 << imu) if enabled_sensors == 0: logger.warn('Not enabling IMUs, no sensors enabled!') return False if not self.dongle._enable_imu_streaming(self, imus_enabled, enabled_sensors): logger.warn('Failed to enable IMU streaming (imus_enabled={}, enabled_sensors={}'.format(imus_enabled, enabled_sensors)) return False self.enabled_imus = enabled_imus self.enabled_sensors = enabled_sensors return True
python
def enable_imu_streaming(self, enabled_imus, enabled_sensors=SENSOR_ALL): """Configures and enables IMU sensor data streaming. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data, you must disable SK8-ExtAna streaming first. Args: enabled_imus (list): a list of distinct ints in the range `0`-`4` inclusive identifying the IMU. `0` is the SK8 itself, and `1`-`4` are the subsidiary IMUs on the USB chain, starting from the end closest to the SK8. enabled_sensors (int): to save battery, you can choose to enable some or all of the sensors on each enabled IMU. By default, the accelerometer, magnetometer, and gyroscope are all enabled. Pass a bitwise OR of one or more of :const:`SENSOR_ACC`, :const:`SENSOR_MAG`, and :const:`SENSOR_GYRO` to gain finer control over the active sensors. Returns: bool. True if successful, False if an error occurred. """ imus_enabled = 0 for imu in enabled_imus: imus_enabled |= (1 << imu) if enabled_sensors == 0: logger.warn('Not enabling IMUs, no sensors enabled!') return False if not self.dongle._enable_imu_streaming(self, imus_enabled, enabled_sensors): logger.warn('Failed to enable IMU streaming (imus_enabled={}, enabled_sensors={}'.format(imus_enabled, enabled_sensors)) return False self.enabled_imus = enabled_imus self.enabled_sensors = enabled_sensors return True
[ "def", "enable_imu_streaming", "(", "self", ",", "enabled_imus", ",", "enabled_sensors", "=", "SENSOR_ALL", ")", ":", "imus_enabled", "=", "0", "for", "imu", "in", "enabled_imus", ":", "imus_enabled", "|=", "(", "1", "<<", "imu", ")", "if", "enabled_sensors", "==", "0", ":", "logger", ".", "warn", "(", "'Not enabling IMUs, no sensors enabled!'", ")", "return", "False", "if", "not", "self", ".", "dongle", ".", "_enable_imu_streaming", "(", "self", ",", "imus_enabled", ",", "enabled_sensors", ")", ":", "logger", ".", "warn", "(", "'Failed to enable IMU streaming (imus_enabled={}, enabled_sensors={}'", ".", "format", "(", "imus_enabled", ",", "enabled_sensors", ")", ")", "return", "False", "self", ".", "enabled_imus", "=", "enabled_imus", "self", ".", "enabled_sensors", "=", "enabled_sensors", "return", "True" ]
Configures and enables IMU sensor data streaming. NOTE: only one streaming mode can be active at any time, so e.g. if you want to stream IMU data, you must disable SK8-ExtAna streaming first. Args: enabled_imus (list): a list of distinct ints in the range `0`-`4` inclusive identifying the IMU. `0` is the SK8 itself, and `1`-`4` are the subsidiary IMUs on the USB chain, starting from the end closest to the SK8. enabled_sensors (int): to save battery, you can choose to enable some or all of the sensors on each enabled IMU. By default, the accelerometer, magnetometer, and gyroscope are all enabled. Pass a bitwise OR of one or more of :const:`SENSOR_ACC`, :const:`SENSOR_MAG`, and :const:`SENSOR_GYRO` to gain finer control over the active sensors. Returns: bool. True if successful, False if an error occurred.
[ "Configures", "and", "enables", "IMU", "sensor", "data", "streaming", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L466-L501
245,831
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.disable_imu_streaming
def disable_imu_streaming(self): """Disable IMU streaming for this device. Returns: True on success, False if an error occurred. """ self.enabled_imus = [] # reset IMU data state for imu in self.imus: imu.reset() return self.dongle._disable_imu_streaming(self)
python
def disable_imu_streaming(self): """Disable IMU streaming for this device. Returns: True on success, False if an error occurred. """ self.enabled_imus = [] # reset IMU data state for imu in self.imus: imu.reset() return self.dongle._disable_imu_streaming(self)
[ "def", "disable_imu_streaming", "(", "self", ")", ":", "self", ".", "enabled_imus", "=", "[", "]", "# reset IMU data state", "for", "imu", "in", "self", ".", "imus", ":", "imu", ".", "reset", "(", ")", "return", "self", ".", "dongle", ".", "_disable_imu_streaming", "(", "self", ")" ]
Disable IMU streaming for this device. Returns: True on success, False if an error occurred.
[ "Disable", "IMU", "streaming", "for", "this", "device", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L525-L535
245,832
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.get_battery_level
def get_battery_level(self): """Reads the battery level descriptor on the device. Returns: int. If successful this will be a positive value representing the current battery level as a percentage. On error, -1 is returned. """ battery_level = self.get_characteristic_handle_from_uuid(UUID_BATTERY_LEVEL) if battery_level is None: logger.warn('Failed to find handle for battery level') return None level = self.dongle._read_attribute(self.conn_handle, battery_level) if level is None: return -1 return ord(level)
python
def get_battery_level(self): """Reads the battery level descriptor on the device. Returns: int. If successful this will be a positive value representing the current battery level as a percentage. On error, -1 is returned. """ battery_level = self.get_characteristic_handle_from_uuid(UUID_BATTERY_LEVEL) if battery_level is None: logger.warn('Failed to find handle for battery level') return None level = self.dongle._read_attribute(self.conn_handle, battery_level) if level is None: return -1 return ord(level)
[ "def", "get_battery_level", "(", "self", ")", ":", "battery_level", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_BATTERY_LEVEL", ")", "if", "battery_level", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for battery level'", ")", "return", "None", "level", "=", "self", ".", "dongle", ".", "_read_attribute", "(", "self", ".", "conn_handle", ",", "battery_level", ")", "if", "level", "is", "None", ":", "return", "-", "1", "return", "ord", "(", "level", ")" ]
Reads the battery level descriptor on the device. Returns: int. If successful this will be a positive value representing the current battery level as a percentage. On error, -1 is returned.
[ "Reads", "the", "battery", "level", "descriptor", "on", "the", "device", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L537-L553
245,833
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.get_device_name
def get_device_name(self, cached=True): """Returns the SK8 device BLE name. Args: cached (bool): if True, returns the locally cached copy of the name. If this is set to False, or the name is not cached, it will read from the device instead. Returns: str. The current device name. May be `None` if an error occurs. """ if cached and self.name is not None: return self.name device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME) if device_name is None: logger.warn('Failed to find handle for device name') return None self.name = self.dongle._read_attribute(self.conn_handle, device_name) return self.name
python
def get_device_name(self, cached=True): """Returns the SK8 device BLE name. Args: cached (bool): if True, returns the locally cached copy of the name. If this is set to False, or the name is not cached, it will read from the device instead. Returns: str. The current device name. May be `None` if an error occurs. """ if cached and self.name is not None: return self.name device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME) if device_name is None: logger.warn('Failed to find handle for device name') return None self.name = self.dongle._read_attribute(self.conn_handle, device_name) return self.name
[ "def", "get_device_name", "(", "self", ",", "cached", "=", "True", ")", ":", "if", "cached", "and", "self", ".", "name", "is", "not", "None", ":", "return", "self", ".", "name", "device_name", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_DEVICE_NAME", ")", "if", "device_name", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for device name'", ")", "return", "None", "self", ".", "name", "=", "self", ".", "dongle", ".", "_read_attribute", "(", "self", ".", "conn_handle", ",", "device_name", ")", "return", "self", ".", "name" ]
Returns the SK8 device BLE name. Args: cached (bool): if True, returns the locally cached copy of the name. If this is set to False, or the name is not cached, it will read from the device instead. Returns: str. The current device name. May be `None` if an error occurs.
[ "Returns", "the", "SK8", "device", "BLE", "name", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L555-L574
245,834
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_device_name
def set_device_name(self, new_name): """Sets a new BLE device name for this SK8. Args: new_name (str): the new device name as an ASCII string, max 20 characters. Returns: True if the name was updated successfully, False otherwise. """ device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME) if device_name is None: logger.warn('Failed to find handle for device name') return False if len(new_name) > MAX_DEVICE_NAME_LEN: logger.error('Device name exceeds maximum length ({} > {})'.format(len(new_name), MAX_DEVICE_NAME_LEN)) return False if self.dongle._write_attribute(self.conn_handle, device_name, new_name.encode('ascii')): self.name = new_name return True return False
python
def set_device_name(self, new_name): """Sets a new BLE device name for this SK8. Args: new_name (str): the new device name as an ASCII string, max 20 characters. Returns: True if the name was updated successfully, False otherwise. """ device_name = self.get_characteristic_handle_from_uuid(UUID_DEVICE_NAME) if device_name is None: logger.warn('Failed to find handle for device name') return False if len(new_name) > MAX_DEVICE_NAME_LEN: logger.error('Device name exceeds maximum length ({} > {})'.format(len(new_name), MAX_DEVICE_NAME_LEN)) return False if self.dongle._write_attribute(self.conn_handle, device_name, new_name.encode('ascii')): self.name = new_name return True return False
[ "def", "set_device_name", "(", "self", ",", "new_name", ")", ":", "device_name", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_DEVICE_NAME", ")", "if", "device_name", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for device name'", ")", "return", "False", "if", "len", "(", "new_name", ")", ">", "MAX_DEVICE_NAME_LEN", ":", "logger", ".", "error", "(", "'Device name exceeds maximum length ({} > {})'", ".", "format", "(", "len", "(", "new_name", ")", ",", "MAX_DEVICE_NAME_LEN", ")", ")", "return", "False", "if", "self", ".", "dongle", ".", "_write_attribute", "(", "self", ".", "conn_handle", ",", "device_name", ",", "new_name", ".", "encode", "(", "'ascii'", ")", ")", ":", "self", ".", "name", "=", "new_name", "return", "True", "return", "False" ]
Sets a new BLE device name for this SK8. Args: new_name (str): the new device name as an ASCII string, max 20 characters. Returns: True if the name was updated successfully, False otherwise.
[ "Sets", "a", "new", "BLE", "device", "name", "for", "this", "SK8", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L576-L599
245,835
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.get_firmware_version
def get_firmware_version(self, cached=True): """Returns the SK8 device firmware version. Args: cached (bool): if True, returns the locally cached copy of the firmware version. If this is set to False, or the version is not cached, it will read from the device instead. Returns: str. The current firmware version string. May be `None` if an error occurs. """ if cached and self.firmware_version != 'unknown': return self.firmware_version firmware_version = self.get_characteristic_handle_from_uuid(UUID_FIRMWARE_REVISION) if firmware_version is None: logger.warn('Failed to find handle for firmware version') return None self.firmware_version = self.dongle._read_attribute(self.conn_handle, firmware_version) return self.firmware_version
python
def get_firmware_version(self, cached=True): """Returns the SK8 device firmware version. Args: cached (bool): if True, returns the locally cached copy of the firmware version. If this is set to False, or the version is not cached, it will read from the device instead. Returns: str. The current firmware version string. May be `None` if an error occurs. """ if cached and self.firmware_version != 'unknown': return self.firmware_version firmware_version = self.get_characteristic_handle_from_uuid(UUID_FIRMWARE_REVISION) if firmware_version is None: logger.warn('Failed to find handle for firmware version') return None self.firmware_version = self.dongle._read_attribute(self.conn_handle, firmware_version) return self.firmware_version
[ "def", "get_firmware_version", "(", "self", ",", "cached", "=", "True", ")", ":", "if", "cached", "and", "self", ".", "firmware_version", "!=", "'unknown'", ":", "return", "self", ".", "firmware_version", "firmware_version", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_FIRMWARE_REVISION", ")", "if", "firmware_version", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for firmware version'", ")", "return", "None", "self", ".", "firmware_version", "=", "self", ".", "dongle", ".", "_read_attribute", "(", "self", ".", "conn_handle", ",", "firmware_version", ")", "return", "self", ".", "firmware_version" ]
Returns the SK8 device firmware version. Args: cached (bool): if True, returns the locally cached copy of the firmware version. If this is set to False, or the version is not cached, it will read from the device instead. Returns: str. The current firmware version string. May be `None` if an error occurs.
[ "Returns", "the", "SK8", "device", "firmware", "version", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L614-L634
245,836
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.get_service
def get_service(self, uuid): """Lookup information about a given GATT service. Args: uuid (str): a string containing the hex-encoded service UUID Returns: None if an error occurs, otherwise a :class:`Service` object. """ if uuid in self.services: return self.services[uuid] if pp_hex(uuid) in self.services: return self.services[pp_hex(uuid)] return None
python
def get_service(self, uuid): """Lookup information about a given GATT service. Args: uuid (str): a string containing the hex-encoded service UUID Returns: None if an error occurs, otherwise a :class:`Service` object. """ if uuid in self.services: return self.services[uuid] if pp_hex(uuid) in self.services: return self.services[pp_hex(uuid)] return None
[ "def", "get_service", "(", "self", ",", "uuid", ")", ":", "if", "uuid", "in", "self", ".", "services", ":", "return", "self", ".", "services", "[", "uuid", "]", "if", "pp_hex", "(", "uuid", ")", "in", "self", ".", "services", ":", "return", "self", ".", "services", "[", "pp_hex", "(", "uuid", ")", "]", "return", "None" ]
Lookup information about a given GATT service. Args: uuid (str): a string containing the hex-encoded service UUID Returns: None if an error occurs, otherwise a :class:`Service` object.
[ "Lookup", "information", "about", "a", "given", "GATT", "service", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L809-L824
245,837
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.get_polling_override
def get_polling_override(self): """Get the current polling override value in milliseconds. See :meth:`set_polling_override` for more information. Returns: None on error, otherwise the current override period in milliseconds (0 = disabled). """ polling_override = self.get_characteristic_handle_from_uuid(UUID_POLLING_OVERRIDE) if polling_override is None: logger.warn('Failed to find handle for polling override') return None override_ms = self.dongle._read_attribute(self.conn_handle, polling_override, True) return None if override_ms is None else ord(override_ms)
python
def get_polling_override(self): """Get the current polling override value in milliseconds. See :meth:`set_polling_override` for more information. Returns: None on error, otherwise the current override period in milliseconds (0 = disabled). """ polling_override = self.get_characteristic_handle_from_uuid(UUID_POLLING_OVERRIDE) if polling_override is None: logger.warn('Failed to find handle for polling override') return None override_ms = self.dongle._read_attribute(self.conn_handle, polling_override, True) return None if override_ms is None else ord(override_ms)
[ "def", "get_polling_override", "(", "self", ")", ":", "polling_override", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_POLLING_OVERRIDE", ")", "if", "polling_override", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for polling override'", ")", "return", "None", "override_ms", "=", "self", ".", "dongle", ".", "_read_attribute", "(", "self", ".", "conn_handle", ",", "polling_override", ",", "True", ")", "return", "None", "if", "override_ms", "is", "None", "else", "ord", "(", "override_ms", ")" ]
Get the current polling override value in milliseconds. See :meth:`set_polling_override` for more information. Returns: None on error, otherwise the current override period in milliseconds (0 = disabled).
[ "Get", "the", "current", "polling", "override", "value", "in", "milliseconds", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L842-L856
245,838
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
SK8.set_polling_override
def set_polling_override(self, override): """Set the sensor polling timer override value in milliseconds. Due to the time it takes to poll all the sensors on up to 5 IMUs, it's not possible for the SK8 firmware to define a single fixed rate for reading new samples without it being artificially low for most configurations. Instead the firmware tries to define a sensible default value for each combination of IMUs and sensors that can be enabled (any combination of 1-5 IMUs and 1-3 sensors on each IMU). In most cases this should work well, but for example if you have multiple SK8s connected through the same dongle and have multiple IMUs enabled on each, you may find packets start to be dropped quite frequently. To mitigate this, you can adjust the period of the timer used by the firmware to poll for new sensor data (and send data packets to the host device). The value should be in integer milliseconds, and have a minimum value of 20. Values below 20 will be treated as a request to disable the override and return to the default polling period. The method can be called before or after streaming is activated, and will take effect immediately. NOTE1: the value is stored in RAM and will not persist across reboots, although it should persist for multiple connections. NOTE2: once set, the override applies to ALL sensor configurations, so for example if you set it while using 5 IMUs on 2 SK8s, then switch to using 1 IMU on each SK8, you will probably want to disable it again as the latter configuration should work fine with the default period. Args: override (int): polling timer override period in milliseconds. Values below 20 are treated as 0, and have the effect of disabling the override in favour of the default periods. Returns: True on success, False on error. 
""" polling_override = self.get_characteristic_handle_from_uuid(UUID_POLLING_OVERRIDE) if polling_override is None: logger.warn('Failed to find handle for device name') return False if self.dongle._write_attribute(self.conn_handle, polling_override, struct.pack('B', override)): return True return False
python
def set_polling_override(self, override): """Set the sensor polling timer override value in milliseconds. Due to the time it takes to poll all the sensors on up to 5 IMUs, it's not possible for the SK8 firmware to define a single fixed rate for reading new samples without it being artificially low for most configurations. Instead the firmware tries to define a sensible default value for each combination of IMUs and sensors that can be enabled (any combination of 1-5 IMUs and 1-3 sensors on each IMU). In most cases this should work well, but for example if you have multiple SK8s connected through the same dongle and have multiple IMUs enabled on each, you may find packets start to be dropped quite frequently. To mitigate this, you can adjust the period of the timer used by the firmware to poll for new sensor data (and send data packets to the host device). The value should be in integer milliseconds, and have a minimum value of 20. Values below 20 will be treated as a request to disable the override and return to the default polling period. The method can be called before or after streaming is activated, and will take effect immediately. NOTE1: the value is stored in RAM and will not persist across reboots, although it should persist for multiple connections. NOTE2: once set, the override applies to ALL sensor configurations, so for example if you set it while using 5 IMUs on 2 SK8s, then switch to using 1 IMU on each SK8, you will probably want to disable it again as the latter configuration should work fine with the default period. Args: override (int): polling timer override period in milliseconds. Values below 20 are treated as 0, and have the effect of disabling the override in favour of the default periods. Returns: True on success, False on error. 
""" polling_override = self.get_characteristic_handle_from_uuid(UUID_POLLING_OVERRIDE) if polling_override is None: logger.warn('Failed to find handle for device name') return False if self.dongle._write_attribute(self.conn_handle, polling_override, struct.pack('B', override)): return True return False
[ "def", "set_polling_override", "(", "self", ",", "override", ")", ":", "polling_override", "=", "self", ".", "get_characteristic_handle_from_uuid", "(", "UUID_POLLING_OVERRIDE", ")", "if", "polling_override", "is", "None", ":", "logger", ".", "warn", "(", "'Failed to find handle for device name'", ")", "return", "False", "if", "self", ".", "dongle", ".", "_write_attribute", "(", "self", ".", "conn_handle", ",", "polling_override", ",", "struct", ".", "pack", "(", "'B'", ",", "override", ")", ")", ":", "return", "True", "return", "False" ]
Set the sensor polling timer override value in milliseconds. Due to the time it takes to poll all the sensors on up to 5 IMUs, it's not possible for the SK8 firmware to define a single fixed rate for reading new samples without it being artificially low for most configurations. Instead the firmware tries to define a sensible default value for each combination of IMUs and sensors that can be enabled (any combination of 1-5 IMUs and 1-3 sensors on each IMU). In most cases this should work well, but for example if you have multiple SK8s connected through the same dongle and have multiple IMUs enabled on each, you may find packets start to be dropped quite frequently. To mitigate this, you can adjust the period of the timer used by the firmware to poll for new sensor data (and send data packets to the host device). The value should be in integer milliseconds, and have a minimum value of 20. Values below 20 will be treated as a request to disable the override and return to the default polling period. The method can be called before or after streaming is activated, and will take effect immediately. NOTE1: the value is stored in RAM and will not persist across reboots, although it should persist for multiple connections. NOTE2: once set, the override applies to ALL sensor configurations, so for example if you set it while using 5 IMUs on 2 SK8s, then switch to using 1 IMU on each SK8, you will probably want to disable it again as the latter configuration should work fine with the default period. Args: override (int): polling timer override period in milliseconds. Values below 20 are treated as 0, and have the effect of disabling the override in favour of the default periods. Returns: True on success, False on error.
[ "Set", "the", "sensor", "polling", "timer", "override", "value", "in", "milliseconds", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L858-L904
245,839
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.init
def init(self, address, hard_reset=False): """Open the serial connection to a dongle at the supplied address. Args: address (str): the serial port address of the BLED112 dongle, e.g. 'COM5' hard_reset (bool): not currently used Returns: True if a connection with the dongle was established, False otherwise. """ self.address = address if hard_reset: # TODO (needs more work to be usable) # if not Dongle._hard_reset(address): # return False # time.sleep(2.0) pass # TODO timeout not working if opened on valid, non Bluegiga port for i in range(Dongle.PORT_RETRIES): try: logger.debug('Setting up BGAPI, attempt {}/{}'.format(i + 1, Dongle.PORT_RETRIES)) self.api = BlueGigaAPI(port=self.address, callbacks=self, baud=Dongle.BAUDRATE, timeout=DEF_TIMEOUT) self.api.start_daemon() break except serial.serialutil.SerialException as e: logger.debug('Failed to init BlueGigaAPI: {}, attempt {}/{}'.format(e, i + 1, Dongle.PORT_RETRIES)) time.sleep(0.1) if self.api is None: return False time.sleep(0.5) # TODO self.get_supported_connections() logger.info('Dongle supports {} connections'.format(self.supported_connections)) if self.supported_connections == -1: logger.error('Failed to retrieve number of supported connections from the dongle! (try reinserting it)') return False self.conn_state = {x: self._STATE_IDLE for x in range(self.supported_connections)} self.reset() self._cbthread = threading.Thread(target=self._cbthreadfunc) self._cbthread.setDaemon(True) self._cbthread_q = Queue() self._cbthread.start() return True
python
def init(self, address, hard_reset=False): """Open the serial connection to a dongle at the supplied address. Args: address (str): the serial port address of the BLED112 dongle, e.g. 'COM5' hard_reset (bool): not currently used Returns: True if a connection with the dongle was established, False otherwise. """ self.address = address if hard_reset: # TODO (needs more work to be usable) # if not Dongle._hard_reset(address): # return False # time.sleep(2.0) pass # TODO timeout not working if opened on valid, non Bluegiga port for i in range(Dongle.PORT_RETRIES): try: logger.debug('Setting up BGAPI, attempt {}/{}'.format(i + 1, Dongle.PORT_RETRIES)) self.api = BlueGigaAPI(port=self.address, callbacks=self, baud=Dongle.BAUDRATE, timeout=DEF_TIMEOUT) self.api.start_daemon() break except serial.serialutil.SerialException as e: logger.debug('Failed to init BlueGigaAPI: {}, attempt {}/{}'.format(e, i + 1, Dongle.PORT_RETRIES)) time.sleep(0.1) if self.api is None: return False time.sleep(0.5) # TODO self.get_supported_connections() logger.info('Dongle supports {} connections'.format(self.supported_connections)) if self.supported_connections == -1: logger.error('Failed to retrieve number of supported connections from the dongle! (try reinserting it)') return False self.conn_state = {x: self._STATE_IDLE for x in range(self.supported_connections)} self.reset() self._cbthread = threading.Thread(target=self._cbthreadfunc) self._cbthread.setDaemon(True) self._cbthread_q = Queue() self._cbthread.start() return True
[ "def", "init", "(", "self", ",", "address", ",", "hard_reset", "=", "False", ")", ":", "self", ".", "address", "=", "address", "if", "hard_reset", ":", "# TODO (needs more work to be usable)", "# if not Dongle._hard_reset(address):", "# return False", "# time.sleep(2.0)", "pass", "# TODO timeout not working if opened on valid, non Bluegiga port", "for", "i", "in", "range", "(", "Dongle", ".", "PORT_RETRIES", ")", ":", "try", ":", "logger", ".", "debug", "(", "'Setting up BGAPI, attempt {}/{}'", ".", "format", "(", "i", "+", "1", ",", "Dongle", ".", "PORT_RETRIES", ")", ")", "self", ".", "api", "=", "BlueGigaAPI", "(", "port", "=", "self", ".", "address", ",", "callbacks", "=", "self", ",", "baud", "=", "Dongle", ".", "BAUDRATE", ",", "timeout", "=", "DEF_TIMEOUT", ")", "self", ".", "api", ".", "start_daemon", "(", ")", "break", "except", "serial", ".", "serialutil", ".", "SerialException", "as", "e", ":", "logger", ".", "debug", "(", "'Failed to init BlueGigaAPI: {}, attempt {}/{}'", ".", "format", "(", "e", ",", "i", "+", "1", ",", "Dongle", ".", "PORT_RETRIES", ")", ")", "time", ".", "sleep", "(", "0.1", ")", "if", "self", ".", "api", "is", "None", ":", "return", "False", "time", ".", "sleep", "(", "0.5", ")", "# TODO", "self", ".", "get_supported_connections", "(", ")", "logger", ".", "info", "(", "'Dongle supports {} connections'", ".", "format", "(", "self", ".", "supported_connections", ")", ")", "if", "self", ".", "supported_connections", "==", "-", "1", ":", "logger", ".", "error", "(", "'Failed to retrieve number of supported connections from the dongle! 
(try reinserting it)'", ")", "return", "False", "self", ".", "conn_state", "=", "{", "x", ":", "self", ".", "_STATE_IDLE", "for", "x", "in", "range", "(", "self", ".", "supported_connections", ")", "}", "self", ".", "reset", "(", ")", "self", ".", "_cbthread", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "_cbthreadfunc", ")", "self", ".", "_cbthread", ".", "setDaemon", "(", "True", ")", "self", ".", "_cbthread_q", "=", "Queue", "(", ")", "self", ".", "_cbthread", ".", "start", "(", ")", "return", "True" ]
Open the serial connection to a dongle at the supplied address. Args: address (str): the serial port address of the BLED112 dongle, e.g. 'COM5' hard_reset (bool): not currently used Returns: True if a connection with the dongle was established, False otherwise.
[ "Open", "the", "serial", "connection", "to", "a", "dongle", "at", "the", "supplied", "address", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L951-L998
245,840
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.find_dongle_port
def find_dongle_port(): """Convenience method which attempts to find the port where a BLED112 dongle is connected. This relies on the `pyserial.tools.list_ports.grep method <https://pyserial.readthedocs.io/en/latest/tools.html#serial.tools.list_ports.grep>`_, and simply searches for a port containing the string "Bluegiga" in its description. (This probably only works on Windows at the moment (TODO)). Returns: A string containing the detected port address, or `None` if no matching port was found. """ logger.debug('Attempting to find Bluegiga dongle...') # TODO this will probably only work on Windows at the moment ports = list(serial.tools.list_ports.grep('Bluegiga')) if len(ports) == 0: logger.debug('No Bluegiga-named serial ports discovered') return None # just return the first port even if multiple dongles logger.debug('Found "Bluegiga" serial port at {}'.format(ports[0].device)) return ports[0].device
python
def find_dongle_port(): """Convenience method which attempts to find the port where a BLED112 dongle is connected. This relies on the `pyserial.tools.list_ports.grep method <https://pyserial.readthedocs.io/en/latest/tools.html#serial.tools.list_ports.grep>`_, and simply searches for a port containing the string "Bluegiga" in its description. (This probably only works on Windows at the moment (TODO)). Returns: A string containing the detected port address, or `None` if no matching port was found. """ logger.debug('Attempting to find Bluegiga dongle...') # TODO this will probably only work on Windows at the moment ports = list(serial.tools.list_ports.grep('Bluegiga')) if len(ports) == 0: logger.debug('No Bluegiga-named serial ports discovered') return None # just return the first port even if multiple dongles logger.debug('Found "Bluegiga" serial port at {}'.format(ports[0].device)) return ports[0].device
[ "def", "find_dongle_port", "(", ")", ":", "logger", ".", "debug", "(", "'Attempting to find Bluegiga dongle...'", ")", "# TODO this will probably only work on Windows at the moment", "ports", "=", "list", "(", "serial", ".", "tools", ".", "list_ports", ".", "grep", "(", "'Bluegiga'", ")", ")", "if", "len", "(", "ports", ")", "==", "0", ":", "logger", ".", "debug", "(", "'No Bluegiga-named serial ports discovered'", ")", "return", "None", "# just return the first port even if multiple dongles", "logger", ".", "debug", "(", "'Found \"Bluegiga\" serial port at {}'", ".", "format", "(", "ports", "[", "0", "]", ".", "device", ")", ")", "return", "ports", "[", "0", "]", ".", "device" ]
Convenience method which attempts to find the port where a BLED112 dongle is connected. This relies on the `pyserial.tools.list_ports.grep method <https://pyserial.readthedocs.io/en/latest/tools.html#serial.tools.list_ports.grep>`_, and simply searches for a port containing the string "Bluegiga" in its description. (This probably only works on Windows at the moment (TODO)). Returns: A string containing the detected port address, or `None` if no matching port was found.
[ "Convenience", "method", "which", "attempts", "to", "find", "the", "port", "where", "a", "BLED112", "dongle", "is", "connected", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1006-L1026
245,841
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.reset
def reset(self): """Attempts to reset the dongle to a known state. When called, this method will reset the internal state of the object, and disconnect any active connections. """ logger.debug('resetting dongle state') self._clear() if self.api is not None: self._set_state(Dongle._STATE_RESET) self.api.ble_cmd_gap_set_mode(gap_discoverable_mode['gap_non_discoverable'], gap_connectable_mode['gap_non_connectable']) self._wait_for_state(self._STATE_RESET) for i in range(self.supported_connections): self._set_conn_state(i, self._STATE_DISCONNECTING) self.api.ble_cmd_connection_disconnect(i) self._wait_for_conn_state(i, self._STATE_DISCONNECTING) logger.debug('reset completed')
python
def reset(self): """Attempts to reset the dongle to a known state. When called, this method will reset the internal state of the object, and disconnect any active connections. """ logger.debug('resetting dongle state') self._clear() if self.api is not None: self._set_state(Dongle._STATE_RESET) self.api.ble_cmd_gap_set_mode(gap_discoverable_mode['gap_non_discoverable'], gap_connectable_mode['gap_non_connectable']) self._wait_for_state(self._STATE_RESET) for i in range(self.supported_connections): self._set_conn_state(i, self._STATE_DISCONNECTING) self.api.ble_cmd_connection_disconnect(i) self._wait_for_conn_state(i, self._STATE_DISCONNECTING) logger.debug('reset completed')
[ "def", "reset", "(", "self", ")", ":", "logger", ".", "debug", "(", "'resetting dongle state'", ")", "self", ".", "_clear", "(", ")", "if", "self", ".", "api", "is", "not", "None", ":", "self", ".", "_set_state", "(", "Dongle", ".", "_STATE_RESET", ")", "self", ".", "api", ".", "ble_cmd_gap_set_mode", "(", "gap_discoverable_mode", "[", "'gap_non_discoverable'", "]", ",", "gap_connectable_mode", "[", "'gap_non_connectable'", "]", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_RESET", ")", "for", "i", "in", "range", "(", "self", ".", "supported_connections", ")", ":", "self", ".", "_set_conn_state", "(", "i", ",", "self", ".", "_STATE_DISCONNECTING", ")", "self", ".", "api", ".", "ble_cmd_connection_disconnect", "(", "i", ")", "self", ".", "_wait_for_conn_state", "(", "i", ",", "self", ".", "_STATE_DISCONNECTING", ")", "logger", ".", "debug", "(", "'reset completed'", ")" ]
Attempts to reset the dongle to a known state. When called, this method will reset the internal state of the object, and disconnect any active connections.
[ "Attempts", "to", "reset", "the", "dongle", "to", "a", "known", "state", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1028-L1048
245,842
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.set_reconnect_parameters
def set_reconnect_parameters(self, interval, attempts, restore_state=True): """Sets the behaviour of the automatic reconnect feature. When a connected SK8 is disconnected unexpectedly (in other words not by a user-triggered action), an automatic attempt to reconnect to the device can be made. If successful this will typically resume the connection with an interruption of only a few seconds. This method allows the application to configure some aspects of the automatic reconnect functionality. Args: interval (float): time in seconds between successive attempts to reconnect. Also applies to the delay between the initial disconnection and the first attempt to reconnect. attempts (int): the number of attempts to make to recreate the connection. This can be set to zero in order to disable the reconnection feature. restore_state (bool): if True, the streaming state of the device will also be restored if possible. For example, the IMU configuration will be re-applied after the reconnection attempt succeeds, to return the SK8 to the same state it was in before the disconnection occurred. Returns: None """ self._reconnect_attempts = max(0, attempts) self._reconnect_interval = max(0, interval) self._reconnect_restore_state = restore_state
python
def set_reconnect_parameters(self, interval, attempts, restore_state=True): """Sets the behaviour of the automatic reconnect feature. When a connected SK8 is disconnected unexpectedly (in other words not by a user-triggered action), an automatic attempt to reconnect to the device can be made. If successful this will typically resume the connection with an interruption of only a few seconds. This method allows the application to configure some aspects of the automatic reconnect functionality. Args: interval (float): time in seconds between successive attempts to reconnect. Also applies to the delay between the initial disconnection and the first attempt to reconnect. attempts (int): the number of attempts to make to recreate the connection. This can be set to zero in order to disable the reconnection feature. restore_state (bool): if True, the streaming state of the device will also be restored if possible. For example, the IMU configuration will be re-applied after the reconnection attempt succeeds, to return the SK8 to the same state it was in before the disconnection occurred. Returns: None """ self._reconnect_attempts = max(0, attempts) self._reconnect_interval = max(0, interval) self._reconnect_restore_state = restore_state
[ "def", "set_reconnect_parameters", "(", "self", ",", "interval", ",", "attempts", ",", "restore_state", "=", "True", ")", ":", "self", ".", "_reconnect_attempts", "=", "max", "(", "0", ",", "attempts", ")", "self", ".", "_reconnect_interval", "=", "max", "(", "0", ",", "interval", ")", "self", ".", "_reconnect_restore_state", "=", "restore_state" ]
Sets the behaviour of the automatic reconnect feature. When a connected SK8 is disconnected unexpectedly (in other words not by a user-triggered action), an automatic attempt to reconnect to the device can be made. If successful this will typically resume the connection with an interruption of only a few seconds. This method allows the application to configure some aspects of the automatic reconnect functionality. Args: interval (float): time in seconds between successive attempts to reconnect. Also applies to the delay between the initial disconnection and the first attempt to reconnect. attempts (int): the number of attempts to make to recreate the connection. This can be set to zero in order to disable the reconnection feature. restore_state (bool): if True, the streaming state of the device will also be restored if possible. For example, the IMU configuration will be re-applied after the reconnection attempt succeeds, to return the SK8 to the same state it was in before the disconnection occurred. Returns: None
[ "Sets", "the", "behaviour", "of", "the", "automatic", "reconnect", "feature", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1093-L1120
245,843
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.scan_and_connect
def scan_and_connect(self, devnames, timeout=DEF_TIMEOUT, calibration=True): """Scan for and then connect to a set of one or more SK8s. This method is intended to be a simple way to combine the steps of running a BLE scan, checking the results and connecting to one or more devices. When called, a scan is started for a period equal to `timeout`, and a list of devices is collected. If at any point during the scan all of the supplied devices are detected, the scan will be ended immediately. After the scan has completed, the method will only proceed to creating connections if the scan results contain all the specified devices. Args: devnames (list): a list of device names (1 or more) timeout (float): a time period in seconds to run the scanning process (will be terminated early if all devices in `devnames` are discovered) Returns: Returns the same results as :meth:`connect`. """ responses = self.scan_devices(devnames, timeout) for dev in devnames: if dev not in responses: logger.error('Failed to find device {} during scan'.format(dev)) return (False, []) return self.connect([responses.get_device(dev) for dev in devnames], calibration)
python
def scan_and_connect(self, devnames, timeout=DEF_TIMEOUT, calibration=True): """Scan for and then connect to a set of one or more SK8s. This method is intended to be a simple way to combine the steps of running a BLE scan, checking the results and connecting to one or more devices. When called, a scan is started for a period equal to `timeout`, and a list of devices is collected. If at any point during the scan all of the supplied devices are detected, the scan will be ended immediately. After the scan has completed, the method will only proceed to creating connections if the scan results contain all the specified devices. Args: devnames (list): a list of device names (1 or more) timeout (float): a time period in seconds to run the scanning process (will be terminated early if all devices in `devnames` are discovered) Returns: Returns the same results as :meth:`connect`. """ responses = self.scan_devices(devnames, timeout) for dev in devnames: if dev not in responses: logger.error('Failed to find device {} during scan'.format(dev)) return (False, []) return self.connect([responses.get_device(dev) for dev in devnames], calibration)
[ "def", "scan_and_connect", "(", "self", ",", "devnames", ",", "timeout", "=", "DEF_TIMEOUT", ",", "calibration", "=", "True", ")", ":", "responses", "=", "self", ".", "scan_devices", "(", "devnames", ",", "timeout", ")", "for", "dev", "in", "devnames", ":", "if", "dev", "not", "in", "responses", ":", "logger", ".", "error", "(", "'Failed to find device {} during scan'", ".", "format", "(", "dev", ")", ")", "return", "(", "False", ",", "[", "]", ")", "return", "self", ".", "connect", "(", "[", "responses", ".", "get_device", "(", "dev", ")", "for", "dev", "in", "devnames", "]", ",", "calibration", ")" ]
Scan for and then connect to a set of one or more SK8s. This method is intended to be a simple way to combine the steps of running a BLE scan, checking the results and connecting to one or more devices. When called, a scan is started for a period equal to `timeout`, and a list of devices is collected. If at any point during the scan all of the supplied devices are detected, the scan will be ended immediately. After the scan has completed, the method will only proceed to creating connections if the scan results contain all the specified devices. Args: devnames (list): a list of device names (1 or more) timeout (float): a time period in seconds to run the scanning process (will be terminated early if all devices in `devnames` are discovered) Returns: Returns the same results as :meth:`connect`.
[ "Scan", "for", "and", "then", "connect", "to", "a", "set", "of", "one", "or", "more", "SK8s", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1134-L1161
245,844
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.begin_scan
def begin_scan(self, callback=None, interval=DEF_SCAN_INTERVAL, window=DEF_SCAN_WINDOW): """Begins a BLE scan and returns immediately. Using this method you can begin a BLE scan and leave the dongle in scanning mode in the background. It will remain in scanning mode until you call the :meth:`end_scan` method or the :meth:`reset` method. Args: callback (callbable): a callback that will be called for each new device discovered by the scanning process. Will be passed a single argument, a :class:`ScanResult` object. May be None if not needed. interval (int): BLE scan interval, in units of 625us window (int): BLE scan window, in units of 625us Returns: True on success, False otherwise. """ # TODO validate params and current state logger.debug('configuring scan parameters') self.api.ble_cmd_gap_set_scan_parameters(interval, window, 1) self._set_state(self._STATE_CONFIGURE_SCAN) self.api.ble_cmd_gap_discover(1) # any discoverable devices self._wait_for_state(self._STATE_CONFIGURE_SCAN) # TODO check state logger.debug('starting async scan for devices') self.scan_targets = None self.scan_callback = callback self._set_state(self._STATE_SCANNING) return True
python
def begin_scan(self, callback=None, interval=DEF_SCAN_INTERVAL, window=DEF_SCAN_WINDOW): """Begins a BLE scan and returns immediately. Using this method you can begin a BLE scan and leave the dongle in scanning mode in the background. It will remain in scanning mode until you call the :meth:`end_scan` method or the :meth:`reset` method. Args: callback (callbable): a callback that will be called for each new device discovered by the scanning process. Will be passed a single argument, a :class:`ScanResult` object. May be None if not needed. interval (int): BLE scan interval, in units of 625us window (int): BLE scan window, in units of 625us Returns: True on success, False otherwise. """ # TODO validate params and current state logger.debug('configuring scan parameters') self.api.ble_cmd_gap_set_scan_parameters(interval, window, 1) self._set_state(self._STATE_CONFIGURE_SCAN) self.api.ble_cmd_gap_discover(1) # any discoverable devices self._wait_for_state(self._STATE_CONFIGURE_SCAN) # TODO check state logger.debug('starting async scan for devices') self.scan_targets = None self.scan_callback = callback self._set_state(self._STATE_SCANNING) return True
[ "def", "begin_scan", "(", "self", ",", "callback", "=", "None", ",", "interval", "=", "DEF_SCAN_INTERVAL", ",", "window", "=", "DEF_SCAN_WINDOW", ")", ":", "# TODO validate params and current state", "logger", ".", "debug", "(", "'configuring scan parameters'", ")", "self", ".", "api", ".", "ble_cmd_gap_set_scan_parameters", "(", "interval", ",", "window", ",", "1", ")", "self", ".", "_set_state", "(", "self", ".", "_STATE_CONFIGURE_SCAN", ")", "self", ".", "api", ".", "ble_cmd_gap_discover", "(", "1", ")", "# any discoverable devices", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_CONFIGURE_SCAN", ")", "# TODO check state", "logger", ".", "debug", "(", "'starting async scan for devices'", ")", "self", ".", "scan_targets", "=", "None", "self", ".", "scan_callback", "=", "callback", "self", ".", "_set_state", "(", "self", ".", "_STATE_SCANNING", ")", "return", "True" ]
Begins a BLE scan and returns immediately. Using this method you can begin a BLE scan and leave the dongle in scanning mode in the background. It will remain in scanning mode until you call the :meth:`end_scan` method or the :meth:`reset` method. Args: callback (callbable): a callback that will be called for each new device discovered by the scanning process. Will be passed a single argument, a :class:`ScanResult` object. May be None if not needed. interval (int): BLE scan interval, in units of 625us window (int): BLE scan window, in units of 625us Returns: True on success, False otherwise.
[ "Begins", "a", "BLE", "scan", "and", "returns", "immediately", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1176-L1206
245,845
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.connect
def connect(self, devicelist, calibration=True): """Establish a connection to one or more SK8 devices. Given a list of 1 or more :class:`ScanResult` objects, this method will attempt to create a connection to each SK8 in sequence. It will return when all connections have been attempted, although they may not all have succeeded. In addition, the dongle has a limit on simultaneous connections, which you can retrieve by calling :meth:`get_supported_connections`. If the number of supplied device names exceeds this value then the method will abort immediately. Args: devicelist (list): a list of :class:`ScanResult` instances, one for each SK8 you wish to create a connection to. calibration (bool): True if calibration data should be loaded post-connection, for each device (if available). Returns: tuple (`result`, `devices`), where `result` is a bool indicating if connections were successfully made to all given devices. If True, `devices` will contain a list of :class:`SK8` instances representing the connected SK8 devices. If False, `devices` will contain a smaller number of :class:`SK8` instances depending on the number of connections that succeeded (possibly 0). 
""" if not isinstance(devicelist, list): devicelist = [devicelist] logger.debug('Connecting to {} devices'.format(len(devicelist))) if len(devicelist) > self.supported_connections: logging.error('Dongle firmware supports max {} connections, {} device connections requested!'.format(self.supported_connections, len(devicelist))) return (False, []) # TODO check number of active connections and fail if exceeds max connected_devices = [] all_connected = True for dev in devicelist: logger.info('Connecting to {} (name={})...'.format(dev.addr, dev.name)) self._set_state(self._STATE_CONNECTING) self.api.ble_cmd_gap_connect_direct(dev.raw_addr, 0, 6, 14, 100, 50) self._wait_for_state(self._STATE_CONNECTING, 5) if self.state != self._STATE_CONNECTED: logger.warn('Connection failed!') # send end procedure to cancel connection attempt self._set_state(self._STATE_GAP_END) self.api.ble_cmd_gap_end_procedure() self._wait_for_state(self._STATE_GAP_END) all_connected = False continue conn_handle = self.conn_handles[-1] logger.info('Connection OK, handle is 0x{:02X}'.format(conn_handle)) sk8 = SK8(self, conn_handle, dev, calibration) self._add_device(sk8) connected_devices.append(sk8) sk8._discover_services() time.sleep(0.1) # TODO return (all_connected, connected_devices)
python
def connect(self, devicelist, calibration=True): """Establish a connection to one or more SK8 devices. Given a list of 1 or more :class:`ScanResult` objects, this method will attempt to create a connection to each SK8 in sequence. It will return when all connections have been attempted, although they may not all have succeeded. In addition, the dongle has a limit on simultaneous connections, which you can retrieve by calling :meth:`get_supported_connections`. If the number of supplied device names exceeds this value then the method will abort immediately. Args: devicelist (list): a list of :class:`ScanResult` instances, one for each SK8 you wish to create a connection to. calibration (bool): True if calibration data should be loaded post-connection, for each device (if available). Returns: tuple (`result`, `devices`), where `result` is a bool indicating if connections were successfully made to all given devices. If True, `devices` will contain a list of :class:`SK8` instances representing the connected SK8 devices. If False, `devices` will contain a smaller number of :class:`SK8` instances depending on the number of connections that succeeded (possibly 0). 
""" if not isinstance(devicelist, list): devicelist = [devicelist] logger.debug('Connecting to {} devices'.format(len(devicelist))) if len(devicelist) > self.supported_connections: logging.error('Dongle firmware supports max {} connections, {} device connections requested!'.format(self.supported_connections, len(devicelist))) return (False, []) # TODO check number of active connections and fail if exceeds max connected_devices = [] all_connected = True for dev in devicelist: logger.info('Connecting to {} (name={})...'.format(dev.addr, dev.name)) self._set_state(self._STATE_CONNECTING) self.api.ble_cmd_gap_connect_direct(dev.raw_addr, 0, 6, 14, 100, 50) self._wait_for_state(self._STATE_CONNECTING, 5) if self.state != self._STATE_CONNECTED: logger.warn('Connection failed!') # send end procedure to cancel connection attempt self._set_state(self._STATE_GAP_END) self.api.ble_cmd_gap_end_procedure() self._wait_for_state(self._STATE_GAP_END) all_connected = False continue conn_handle = self.conn_handles[-1] logger.info('Connection OK, handle is 0x{:02X}'.format(conn_handle)) sk8 = SK8(self, conn_handle, dev, calibration) self._add_device(sk8) connected_devices.append(sk8) sk8._discover_services() time.sleep(0.1) # TODO return (all_connected, connected_devices)
[ "def", "connect", "(", "self", ",", "devicelist", ",", "calibration", "=", "True", ")", ":", "if", "not", "isinstance", "(", "devicelist", ",", "list", ")", ":", "devicelist", "=", "[", "devicelist", "]", "logger", ".", "debug", "(", "'Connecting to {} devices'", ".", "format", "(", "len", "(", "devicelist", ")", ")", ")", "if", "len", "(", "devicelist", ")", ">", "self", ".", "supported_connections", ":", "logging", ".", "error", "(", "'Dongle firmware supports max {} connections, {} device connections requested!'", ".", "format", "(", "self", ".", "supported_connections", ",", "len", "(", "devicelist", ")", ")", ")", "return", "(", "False", ",", "[", "]", ")", "# TODO check number of active connections and fail if exceeds max", "connected_devices", "=", "[", "]", "all_connected", "=", "True", "for", "dev", "in", "devicelist", ":", "logger", ".", "info", "(", "'Connecting to {} (name={})...'", ".", "format", "(", "dev", ".", "addr", ",", "dev", ".", "name", ")", ")", "self", ".", "_set_state", "(", "self", ".", "_STATE_CONNECTING", ")", "self", ".", "api", ".", "ble_cmd_gap_connect_direct", "(", "dev", ".", "raw_addr", ",", "0", ",", "6", ",", "14", ",", "100", ",", "50", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_CONNECTING", ",", "5", ")", "if", "self", ".", "state", "!=", "self", ".", "_STATE_CONNECTED", ":", "logger", ".", "warn", "(", "'Connection failed!'", ")", "# send end procedure to cancel connection attempt", "self", ".", "_set_state", "(", "self", ".", "_STATE_GAP_END", ")", "self", ".", "api", ".", "ble_cmd_gap_end_procedure", "(", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_GAP_END", ")", "all_connected", "=", "False", "continue", "conn_handle", "=", "self", ".", "conn_handles", "[", "-", "1", "]", "logger", ".", "info", "(", "'Connection OK, handle is 0x{:02X}'", ".", "format", "(", "conn_handle", ")", ")", "sk8", "=", "SK8", "(", "self", ",", "conn_handle", ",", "dev", ",", "calibration", ")", "self", ".", 
"_add_device", "(", "sk8", ")", "connected_devices", ".", "append", "(", "sk8", ")", "sk8", ".", "_discover_services", "(", ")", "time", ".", "sleep", "(", "0.1", ")", "# TODO", "return", "(", "all_connected", ",", "connected_devices", ")" ]
Establish a connection to one or more SK8 devices. Given a list of 1 or more :class:`ScanResult` objects, this method will attempt to create a connection to each SK8 in sequence. It will return when all connections have been attempted, although they may not all have succeeded. In addition, the dongle has a limit on simultaneous connections, which you can retrieve by calling :meth:`get_supported_connections`. If the number of supplied device names exceeds this value then the method will abort immediately. Args: devicelist (list): a list of :class:`ScanResult` instances, one for each SK8 you wish to create a connection to. calibration (bool): True if calibration data should be loaded post-connection, for each device (if available). Returns: tuple (`result`, `devices`), where `result` is a bool indicating if connections were successfully made to all given devices. If True, `devices` will contain a list of :class:`SK8` instances representing the connected SK8 devices. If False, `devices` will contain a smaller number of :class:`SK8` instances depending on the number of connections that succeeded (possibly 0).
[ "Establish", "a", "connection", "to", "one", "or", "more", "SK8", "devices", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1269-L1329
245,846
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.connect_direct
def connect_direct(self, device, calibration=True): """Establish a connection to a single SK8. Args: device: either a :class:`ScanResult` or a plain hardware address string in xx:xx:xx:xx:xx:xx format. calibration (bool): True to attempt to load calibration data for this device after connection, False otherwise. See :meth:`SK8.load_calibration`. Returns: tuple (`result`, `device`), where `result` is a bool indicating if a connection was created successfully. If `result` is True, `device` will be set to a new :class:`SK8` instance. Otherwise it will be None. """ # convert string address into a ScanResult if needed if not isinstance(device, ScanResult): if isinstance(device, str): device = ScanResult(device, fmt_addr_raw(device)) elif isinstance(device, unicode): device = device.encode('ascii') device = ScanResult(device, fmt_addr_raw(device)) else: logger.warn('Expected ScanResult, found type {} instead!'.format(type(device))) return (False, None) logger.debug('Connecting directly to device address'.format(device.addr)) # TODO check number of active connections and fail if exceeds max self._set_state(self._STATE_CONNECTING) # TODO parameters here = ??? self.api.ble_cmd_gap_connect_direct(device.raw_addr, 0, 6, 14, 100, 50) self._wait_for_state(self._STATE_CONNECTING, 5) if self.state != self._STATE_CONNECTED: logger.warn('Connection failed!') # send end procedure to cancel connection attempt self._set_state(self._STATE_GAP_END) self.api.ble_cmd_gap_end_procedure() self._wait_for_state(self._STATE_GAP_END) return (False, None) conn_handle = self.conn_handles[-1] logger.info('Connection OK, handle is 0x{:02X}'.format(conn_handle)) sk8 = SK8(self, conn_handle, device, calibration) self._add_device(sk8) sk8._discover_services() return (True, sk8)
python
def connect_direct(self, device, calibration=True): """Establish a connection to a single SK8. Args: device: either a :class:`ScanResult` or a plain hardware address string in xx:xx:xx:xx:xx:xx format. calibration (bool): True to attempt to load calibration data for this device after connection, False otherwise. See :meth:`SK8.load_calibration`. Returns: tuple (`result`, `device`), where `result` is a bool indicating if a connection was created successfully. If `result` is True, `device` will be set to a new :class:`SK8` instance. Otherwise it will be None. """ # convert string address into a ScanResult if needed if not isinstance(device, ScanResult): if isinstance(device, str): device = ScanResult(device, fmt_addr_raw(device)) elif isinstance(device, unicode): device = device.encode('ascii') device = ScanResult(device, fmt_addr_raw(device)) else: logger.warn('Expected ScanResult, found type {} instead!'.format(type(device))) return (False, None) logger.debug('Connecting directly to device address'.format(device.addr)) # TODO check number of active connections and fail if exceeds max self._set_state(self._STATE_CONNECTING) # TODO parameters here = ??? self.api.ble_cmd_gap_connect_direct(device.raw_addr, 0, 6, 14, 100, 50) self._wait_for_state(self._STATE_CONNECTING, 5) if self.state != self._STATE_CONNECTED: logger.warn('Connection failed!') # send end procedure to cancel connection attempt self._set_state(self._STATE_GAP_END) self.api.ble_cmd_gap_end_procedure() self._wait_for_state(self._STATE_GAP_END) return (False, None) conn_handle = self.conn_handles[-1] logger.info('Connection OK, handle is 0x{:02X}'.format(conn_handle)) sk8 = SK8(self, conn_handle, device, calibration) self._add_device(sk8) sk8._discover_services() return (True, sk8)
[ "def", "connect_direct", "(", "self", ",", "device", ",", "calibration", "=", "True", ")", ":", "# convert string address into a ScanResult if needed", "if", "not", "isinstance", "(", "device", ",", "ScanResult", ")", ":", "if", "isinstance", "(", "device", ",", "str", ")", ":", "device", "=", "ScanResult", "(", "device", ",", "fmt_addr_raw", "(", "device", ")", ")", "elif", "isinstance", "(", "device", ",", "unicode", ")", ":", "device", "=", "device", ".", "encode", "(", "'ascii'", ")", "device", "=", "ScanResult", "(", "device", ",", "fmt_addr_raw", "(", "device", ")", ")", "else", ":", "logger", ".", "warn", "(", "'Expected ScanResult, found type {} instead!'", ".", "format", "(", "type", "(", "device", ")", ")", ")", "return", "(", "False", ",", "None", ")", "logger", ".", "debug", "(", "'Connecting directly to device address'", ".", "format", "(", "device", ".", "addr", ")", ")", "# TODO check number of active connections and fail if exceeds max", "self", ".", "_set_state", "(", "self", ".", "_STATE_CONNECTING", ")", "# TODO parameters here = ???", "self", ".", "api", ".", "ble_cmd_gap_connect_direct", "(", "device", ".", "raw_addr", ",", "0", ",", "6", ",", "14", ",", "100", ",", "50", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_CONNECTING", ",", "5", ")", "if", "self", ".", "state", "!=", "self", ".", "_STATE_CONNECTED", ":", "logger", ".", "warn", "(", "'Connection failed!'", ")", "# send end procedure to cancel connection attempt", "self", ".", "_set_state", "(", "self", ".", "_STATE_GAP_END", ")", "self", ".", "api", ".", "ble_cmd_gap_end_procedure", "(", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_GAP_END", ")", "return", "(", "False", ",", "None", ")", "conn_handle", "=", "self", ".", "conn_handles", "[", "-", "1", "]", "logger", ".", "info", "(", "'Connection OK, handle is 0x{:02X}'", ".", "format", "(", "conn_handle", ")", ")", "sk8", "=", "SK8", "(", "self", ",", "conn_handle", ",", "device", ",", 
"calibration", ")", "self", ".", "_add_device", "(", "sk8", ")", "sk8", ".", "_discover_services", "(", ")", "return", "(", "True", ",", "sk8", ")" ]
Establish a connection to a single SK8. Args: device: either a :class:`ScanResult` or a plain hardware address string in xx:xx:xx:xx:xx:xx format. calibration (bool): True to attempt to load calibration data for this device after connection, False otherwise. See :meth:`SK8.load_calibration`. Returns: tuple (`result`, `device`), where `result` is a bool indicating if a connection was created successfully. If `result` is True, `device` will be set to a new :class:`SK8` instance. Otherwise it will be None.
[ "Establish", "a", "connection", "to", "a", "single", "SK8", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1332-L1380
245,847
andrewramsay/sk8-drivers
pysk8/pysk8/core.py
Dongle.get_supported_connections
def get_supported_connections(self): """Returns the number of supported simultaneous BLE connections. The BLED112 is capable of supporting up to 8 simultaneous BLE connections. However, the default firmware image has a limit of just 3 devices, which is a lot easier to run up against. This method retrieves the current value of this setting. Returns: int. The number of supported simultaneous connections, or -1 on error """ if self.supported_connections != -1: return self.supported_connections if self.api is None: return -1 self._set_state(self._STATE_DONGLE_COMMAND) self.api.ble_cmd_system_get_connections() self._wait_for_state(self._STATE_DONGLE_COMMAND) return self.supported_connections
python
def get_supported_connections(self): """Returns the number of supported simultaneous BLE connections. The BLED112 is capable of supporting up to 8 simultaneous BLE connections. However, the default firmware image has a limit of just 3 devices, which is a lot easier to run up against. This method retrieves the current value of this setting. Returns: int. The number of supported simultaneous connections, or -1 on error """ if self.supported_connections != -1: return self.supported_connections if self.api is None: return -1 self._set_state(self._STATE_DONGLE_COMMAND) self.api.ble_cmd_system_get_connections() self._wait_for_state(self._STATE_DONGLE_COMMAND) return self.supported_connections
[ "def", "get_supported_connections", "(", "self", ")", ":", "if", "self", ".", "supported_connections", "!=", "-", "1", ":", "return", "self", ".", "supported_connections", "if", "self", ".", "api", "is", "None", ":", "return", "-", "1", "self", ".", "_set_state", "(", "self", ".", "_STATE_DONGLE_COMMAND", ")", "self", ".", "api", ".", "ble_cmd_system_get_connections", "(", ")", "self", ".", "_wait_for_state", "(", "self", ".", "_STATE_DONGLE_COMMAND", ")", "return", "self", ".", "supported_connections" ]
Returns the number of supported simultaneous BLE connections. The BLED112 is capable of supporting up to 8 simultaneous BLE connections. However, the default firmware image has a limit of just 3 devices, which is a lot easier to run up against. This method retrieves the current value of this setting. Returns: int. The number of supported simultaneous connections, or -1 on error
[ "Returns", "the", "number", "of", "supported", "simultaneous", "BLE", "connections", "." ]
67347a71762fb421f5ae65a595def5c7879e8b0c
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/pysk8/core.py#L1382-L1403
245,848
darkfeline/mir.anidb
mir/anidb/api.py
httpapi_request
def httpapi_request(client, **params) -> 'Response': """Send a request to AniDB HTTP API. https://wiki.anidb.net/w/HTTP_API_Definition """ return requests.get( _HTTPAPI, params={ 'client': client.name, 'clientver': client.version, 'protover': 1, **params })
python
def httpapi_request(client, **params) -> 'Response': """Send a request to AniDB HTTP API. https://wiki.anidb.net/w/HTTP_API_Definition """ return requests.get( _HTTPAPI, params={ 'client': client.name, 'clientver': client.version, 'protover': 1, **params })
[ "def", "httpapi_request", "(", "client", ",", "*", "*", "params", ")", "->", "'Response'", ":", "return", "requests", ".", "get", "(", "_HTTPAPI", ",", "params", "=", "{", "'client'", ":", "client", ".", "name", ",", "'clientver'", ":", "client", ".", "version", ",", "'protover'", ":", "1", ",", "*", "*", "params", "}", ")" ]
Send a request to AniDB HTTP API. https://wiki.anidb.net/w/HTTP_API_Definition
[ "Send", "a", "request", "to", "AniDB", "HTTP", "API", "." ]
a0d25908f85fb1ff4bc595954bfc3f223f1b5acc
https://github.com/darkfeline/mir.anidb/blob/a0d25908f85fb1ff4bc595954bfc3f223f1b5acc/mir/anidb/api.py#L43-L55
245,849
darkfeline/mir.anidb
mir/anidb/api.py
unpack_xml
def unpack_xml(text) -> ET.ElementTree: """Unpack an XML string from AniDB API.""" etree: ET.ElementTree = ET.parse(io.StringIO(text)) _check_for_errors(etree) return etree
python
def unpack_xml(text) -> ET.ElementTree: """Unpack an XML string from AniDB API.""" etree: ET.ElementTree = ET.parse(io.StringIO(text)) _check_for_errors(etree) return etree
[ "def", "unpack_xml", "(", "text", ")", "->", "ET", ".", "ElementTree", ":", "etree", ":", "ET", ".", "ElementTree", "=", "ET", ".", "parse", "(", "io", ".", "StringIO", "(", "text", ")", ")", "_check_for_errors", "(", "etree", ")", "return", "etree" ]
Unpack an XML string from AniDB API.
[ "Unpack", "an", "XML", "string", "from", "AniDB", "API", "." ]
a0d25908f85fb1ff4bc595954bfc3f223f1b5acc
https://github.com/darkfeline/mir.anidb/blob/a0d25908f85fb1ff4bc595954bfc3f223f1b5acc/mir/anidb/api.py#L58-L62
245,850
darkfeline/mir.anidb
mir/anidb/api.py
_check_for_errors
def _check_for_errors(etree: ET.ElementTree): """Check AniDB response XML tree for errors.""" if etree.getroot().tag == 'error': raise APIError(etree.getroot().text)
python
def _check_for_errors(etree: ET.ElementTree): """Check AniDB response XML tree for errors.""" if etree.getroot().tag == 'error': raise APIError(etree.getroot().text)
[ "def", "_check_for_errors", "(", "etree", ":", "ET", ".", "ElementTree", ")", ":", "if", "etree", ".", "getroot", "(", ")", ".", "tag", "==", "'error'", ":", "raise", "APIError", "(", "etree", ".", "getroot", "(", ")", ".", "text", ")" ]
Check AniDB response XML tree for errors.
[ "Check", "AniDB", "response", "XML", "tree", "for", "errors", "." ]
a0d25908f85fb1ff4bc595954bfc3f223f1b5acc
https://github.com/darkfeline/mir.anidb/blob/a0d25908f85fb1ff4bc595954bfc3f223f1b5acc/mir/anidb/api.py#L65-L68
245,851
kyleam/wcut
wcut/_core.py
extract_fields
def extract_fields(lines, delim, searches, match_lineno=1, **kwargs): """Return generator of fields matching `searches`. Parameters ---------- lines : iterable Provides line number (1-based) and line (str) delim : str Delimiter to split line by to produce fields searches : iterable Returns search (str) to match against line fields. match_lineno : int Line number of line to split and search fields Remaining keyword arguments are passed to `match_fields`. """ keep_idx = [] for lineno, line in lines: if lineno < match_lineno or delim not in line: if lineno == match_lineno: raise WcutError('Delimter not found in line {}'.format( match_lineno)) yield [line] continue fields = line.split(delim) if lineno == match_lineno: keep_idx = list(match_fields(fields, searches, **kwargs)) keep_fields = [fields[i] for i in keep_idx] if keep_fields: yield keep_fields
python
def extract_fields(lines, delim, searches, match_lineno=1, **kwargs): """Return generator of fields matching `searches`. Parameters ---------- lines : iterable Provides line number (1-based) and line (str) delim : str Delimiter to split line by to produce fields searches : iterable Returns search (str) to match against line fields. match_lineno : int Line number of line to split and search fields Remaining keyword arguments are passed to `match_fields`. """ keep_idx = [] for lineno, line in lines: if lineno < match_lineno or delim not in line: if lineno == match_lineno: raise WcutError('Delimter not found in line {}'.format( match_lineno)) yield [line] continue fields = line.split(delim) if lineno == match_lineno: keep_idx = list(match_fields(fields, searches, **kwargs)) keep_fields = [fields[i] for i in keep_idx] if keep_fields: yield keep_fields
[ "def", "extract_fields", "(", "lines", ",", "delim", ",", "searches", ",", "match_lineno", "=", "1", ",", "*", "*", "kwargs", ")", ":", "keep_idx", "=", "[", "]", "for", "lineno", ",", "line", "in", "lines", ":", "if", "lineno", "<", "match_lineno", "or", "delim", "not", "in", "line", ":", "if", "lineno", "==", "match_lineno", ":", "raise", "WcutError", "(", "'Delimter not found in line {}'", ".", "format", "(", "match_lineno", ")", ")", "yield", "[", "line", "]", "continue", "fields", "=", "line", ".", "split", "(", "delim", ")", "if", "lineno", "==", "match_lineno", ":", "keep_idx", "=", "list", "(", "match_fields", "(", "fields", ",", "searches", ",", "*", "*", "kwargs", ")", ")", "keep_fields", "=", "[", "fields", "[", "i", "]", "for", "i", "in", "keep_idx", "]", "if", "keep_fields", ":", "yield", "keep_fields" ]
Return generator of fields matching `searches`. Parameters ---------- lines : iterable Provides line number (1-based) and line (str) delim : str Delimiter to split line by to produce fields searches : iterable Returns search (str) to match against line fields. match_lineno : int Line number of line to split and search fields Remaining keyword arguments are passed to `match_fields`.
[ "Return", "generator", "of", "fields", "matching", "searches", "." ]
36f6e10a4c3b4dae274a55010463c6acce83bc71
https://github.com/kyleam/wcut/blob/36f6e10a4c3b4dae274a55010463c6acce83bc71/wcut/_core.py#L36-L67
245,852
kyleam/wcut
wcut/_core.py
match_fields
def match_fields(fields, searches, ignore_case=False, wholename=False, complement=False): """Return fields that match searches. Parameters ---------- fields : iterable searches : iterable ignore_case, wholename, complement : boolean """ if ignore_case: fields = [f.lower() for f in fields] searches = [s.lower() for s in searches] if wholename: match_found = _complete_match else: match_found = _partial_match fields = [(i, field) for i, field in enumerate(fields)] matched = [] for search, (idx, field) in itertools.product(searches, fields): if not search: ## don't return all fields for '' continue if match_found(search, field) and idx not in matched: matched.append(idx) if complement: matched = [idx for idx in list(zip(*fields))[0] if idx not in matched] return matched
python
def match_fields(fields, searches, ignore_case=False, wholename=False, complement=False): """Return fields that match searches. Parameters ---------- fields : iterable searches : iterable ignore_case, wholename, complement : boolean """ if ignore_case: fields = [f.lower() for f in fields] searches = [s.lower() for s in searches] if wholename: match_found = _complete_match else: match_found = _partial_match fields = [(i, field) for i, field in enumerate(fields)] matched = [] for search, (idx, field) in itertools.product(searches, fields): if not search: ## don't return all fields for '' continue if match_found(search, field) and idx not in matched: matched.append(idx) if complement: matched = [idx for idx in list(zip(*fields))[0] if idx not in matched] return matched
[ "def", "match_fields", "(", "fields", ",", "searches", ",", "ignore_case", "=", "False", ",", "wholename", "=", "False", ",", "complement", "=", "False", ")", ":", "if", "ignore_case", ":", "fields", "=", "[", "f", ".", "lower", "(", ")", "for", "f", "in", "fields", "]", "searches", "=", "[", "s", ".", "lower", "(", ")", "for", "s", "in", "searches", "]", "if", "wholename", ":", "match_found", "=", "_complete_match", "else", ":", "match_found", "=", "_partial_match", "fields", "=", "[", "(", "i", ",", "field", ")", "for", "i", ",", "field", "in", "enumerate", "(", "fields", ")", "]", "matched", "=", "[", "]", "for", "search", ",", "(", "idx", ",", "field", ")", "in", "itertools", ".", "product", "(", "searches", ",", "fields", ")", ":", "if", "not", "search", ":", "## don't return all fields for ''", "continue", "if", "match_found", "(", "search", ",", "field", ")", "and", "idx", "not", "in", "matched", ":", "matched", ".", "append", "(", "idx", ")", "if", "complement", ":", "matched", "=", "[", "idx", "for", "idx", "in", "list", "(", "zip", "(", "*", "fields", ")", ")", "[", "0", "]", "if", "idx", "not", "in", "matched", "]", "return", "matched" ]
Return fields that match searches. Parameters ---------- fields : iterable searches : iterable ignore_case, wholename, complement : boolean
[ "Return", "fields", "that", "match", "searches", "." ]
36f6e10a4c3b4dae274a55010463c6acce83bc71
https://github.com/kyleam/wcut/blob/36f6e10a4c3b4dae274a55010463c6acce83bc71/wcut/_core.py#L70-L99
245,853
etscrivner/nose-perfdump
perfdump/connection.py
SqliteConnection.connect
def connect(cls, dbname): """Create a new connection to the SQLite3 database. :param dbname: The database name :type dbname: str """ test_times_schema = """ CREATE TABLE IF NOT EXISTS test_times ( file text, module text, class text, func text, elapsed float ) """ setup_times_schema = """ CREATE TABLE IF NOT EXISTS setup_times ( file text, module text, class text, func text, elapsed float ) """ schemas = [test_times_schema, setup_times_schema] db_file = '{}.db'.format(dbname) cls.connection = sqlite3.connect(db_file) for s in schemas: cls.connection.execute(s)
python
def connect(cls, dbname): """Create a new connection to the SQLite3 database. :param dbname: The database name :type dbname: str """ test_times_schema = """ CREATE TABLE IF NOT EXISTS test_times ( file text, module text, class text, func text, elapsed float ) """ setup_times_schema = """ CREATE TABLE IF NOT EXISTS setup_times ( file text, module text, class text, func text, elapsed float ) """ schemas = [test_times_schema, setup_times_schema] db_file = '{}.db'.format(dbname) cls.connection = sqlite3.connect(db_file) for s in schemas: cls.connection.execute(s)
[ "def", "connect", "(", "cls", ",", "dbname", ")", ":", "test_times_schema", "=", "\"\"\"\n CREATE TABLE IF NOT EXISTS test_times (\n file text,\n module text,\n class text,\n func text,\n elapsed float\n )\n \"\"\"", "setup_times_schema", "=", "\"\"\"\n CREATE TABLE IF NOT EXISTS setup_times (\n file text,\n module text,\n class text,\n func text,\n elapsed float\n )\n \"\"\"", "schemas", "=", "[", "test_times_schema", ",", "setup_times_schema", "]", "db_file", "=", "'{}.db'", ".", "format", "(", "dbname", ")", "cls", ".", "connection", "=", "sqlite3", ".", "connect", "(", "db_file", ")", "for", "s", "in", "schemas", ":", "cls", ".", "connection", ".", "execute", "(", "s", ")" ]
Create a new connection to the SQLite3 database. :param dbname: The database name :type dbname: str
[ "Create", "a", "new", "connection", "to", "the", "SQLite3", "database", "." ]
a203a68495d30346fab43fb903cb60cd29b17d49
https://github.com/etscrivner/nose-perfdump/blob/a203a68495d30346fab43fb903cb60cd29b17d49/perfdump/connection.py#L38-L72
245,854
etscrivner/nose-perfdump
perfdump/connection.py
SqliteConnection.get
def get(cls, dbname="perfdump"): """Returns the singleton connection to the SQLite3 database. :param dbname: The database name :type dbname: str """ try: return cls.connection except: cls.connect(dbname) return cls.connection
python
def get(cls, dbname="perfdump"): """Returns the singleton connection to the SQLite3 database. :param dbname: The database name :type dbname: str """ try: return cls.connection except: cls.connect(dbname) return cls.connection
[ "def", "get", "(", "cls", ",", "dbname", "=", "\"perfdump\"", ")", ":", "try", ":", "return", "cls", ".", "connection", "except", ":", "cls", ".", "connect", "(", "dbname", ")", "return", "cls", ".", "connection" ]
Returns the singleton connection to the SQLite3 database. :param dbname: The database name :type dbname: str
[ "Returns", "the", "singleton", "connection", "to", "the", "SQLite3", "database", "." ]
a203a68495d30346fab43fb903cb60cd29b17d49
https://github.com/etscrivner/nose-perfdump/blob/a203a68495d30346fab43fb903cb60cd29b17d49/perfdump/connection.py#L75-L86
245,855
AndreLouisCaron/runwith
features/environment.py
before_scenario
def before_scenario(context, scenario): """Prepare a fresh environment for each scenario.""" # Prepare a new temporary directory. context.directory = testfixtures.TempDirectory(create=True) context.old_cwd = os.getcwd() context.new_cwd = context.directory.path # Move into our new working directory. os.chdir(context.new_cwd)
python
def before_scenario(context, scenario): """Prepare a fresh environment for each scenario.""" # Prepare a new temporary directory. context.directory = testfixtures.TempDirectory(create=True) context.old_cwd = os.getcwd() context.new_cwd = context.directory.path # Move into our new working directory. os.chdir(context.new_cwd)
[ "def", "before_scenario", "(", "context", ",", "scenario", ")", ":", "# Prepare a new temporary directory.", "context", ".", "directory", "=", "testfixtures", ".", "TempDirectory", "(", "create", "=", "True", ")", "context", ".", "old_cwd", "=", "os", ".", "getcwd", "(", ")", "context", ".", "new_cwd", "=", "context", ".", "directory", ".", "path", "# Move into our new working directory.", "os", ".", "chdir", "(", "context", ".", "new_cwd", ")" ]
Prepare a fresh environment for each scenario.
[ "Prepare", "a", "fresh", "environment", "for", "each", "scenario", "." ]
cfa2b6ae67d73ec5b24f1502a37060d838276e8b
https://github.com/AndreLouisCaron/runwith/blob/cfa2b6ae67d73ec5b24f1502a37060d838276e8b/features/environment.py#L8-L17
245,856
AndreLouisCaron/runwith
features/environment.py
after_scenario
def after_scenario(context, scenario): """Leave the environment fresh after each scenario.""" # Move back into the original working directory. os.chdir(context.old_cwd) # Delete all content generated by the test. context.directory.cleanup()
python
def after_scenario(context, scenario): """Leave the environment fresh after each scenario.""" # Move back into the original working directory. os.chdir(context.old_cwd) # Delete all content generated by the test. context.directory.cleanup()
[ "def", "after_scenario", "(", "context", ",", "scenario", ")", ":", "# Move back into the original working directory.", "os", ".", "chdir", "(", "context", ".", "old_cwd", ")", "# Delete all content generated by the test.", "context", ".", "directory", ".", "cleanup", "(", ")" ]
Leave the environment fresh after each scenario.
[ "Leave", "the", "environment", "fresh", "after", "each", "scenario", "." ]
cfa2b6ae67d73ec5b24f1502a37060d838276e8b
https://github.com/AndreLouisCaron/runwith/blob/cfa2b6ae67d73ec5b24f1502a37060d838276e8b/features/environment.py#L20-L27
245,857
razor-x/dichalcogenides
dichalcogenides/parameters/parameters.py
Parameters.get
def get(self, name): """Get a parameter object by name. :param name: Name of the parameter object. :type name: str :return: The parameter. :rtype: Parameter """ parameter = next((p for p in self.parameters if p.name == name), None) if parameter is None: raise LookupError("Cannot find parameter '" + name + "'.") return parameter
python
def get(self, name): """Get a parameter object by name. :param name: Name of the parameter object. :type name: str :return: The parameter. :rtype: Parameter """ parameter = next((p for p in self.parameters if p.name == name), None) if parameter is None: raise LookupError("Cannot find parameter '" + name + "'.") return parameter
[ "def", "get", "(", "self", ",", "name", ")", ":", "parameter", "=", "next", "(", "(", "p", "for", "p", "in", "self", ".", "parameters", "if", "p", ".", "name", "==", "name", ")", ",", "None", ")", "if", "parameter", "is", "None", ":", "raise", "LookupError", "(", "\"Cannot find parameter '\"", "+", "name", "+", "\"'.\"", ")", "return", "parameter" ]
Get a parameter object by name. :param name: Name of the parameter object. :type name: str :return: The parameter. :rtype: Parameter
[ "Get", "a", "parameter", "object", "by", "name", "." ]
0fa1995a3a328b679c9926f73239d0ecdc6e5d3d
https://github.com/razor-x/dichalcogenides/blob/0fa1995a3a328b679c9926f73239d0ecdc6e5d3d/dichalcogenides/parameters/parameters.py#L69-L81
245,858
razor-x/dichalcogenides
dichalcogenides/parameters/parameters.py
Parameters.add_parameter
def add_parameter(self, name, value, meta=None): """Add a parameter to the parameter list. :param name: New parameter's name. :type name: str :param value: New parameter's value. :type value: float :param meta: New parameter's meta property. :type meta: dict """ parameter = Parameter(name, value) if meta: parameter.meta = meta self.parameters.append(parameter)
python
def add_parameter(self, name, value, meta=None): """Add a parameter to the parameter list. :param name: New parameter's name. :type name: str :param value: New parameter's value. :type value: float :param meta: New parameter's meta property. :type meta: dict """ parameter = Parameter(name, value) if meta: parameter.meta = meta self.parameters.append(parameter)
[ "def", "add_parameter", "(", "self", ",", "name", ",", "value", ",", "meta", "=", "None", ")", ":", "parameter", "=", "Parameter", "(", "name", ",", "value", ")", "if", "meta", ":", "parameter", ".", "meta", "=", "meta", "self", ".", "parameters", ".", "append", "(", "parameter", ")" ]
Add a parameter to the parameter list. :param name: New parameter's name. :type name: str :param value: New parameter's value. :type value: float :param meta: New parameter's meta property. :type meta: dict
[ "Add", "a", "parameter", "to", "the", "parameter", "list", "." ]
0fa1995a3a328b679c9926f73239d0ecdc6e5d3d
https://github.com/razor-x/dichalcogenides/blob/0fa1995a3a328b679c9926f73239d0ecdc6e5d3d/dichalcogenides/parameters/parameters.py#L83-L97
245,859
razor-x/dichalcogenides
dichalcogenides/parameters/parameters.py
Parameters.load_file
def load_file(self, path): """Load a YAML file with parameter data and other metadata. :param path: Path to YAML file. :type path: str The data in the YAML file is used to set the properties for the instance. The YAML file must contain a ``parameters`` key. It may optionally include any keys defined in :attr:`.property_keys`. .. code-block:: yaml # data.yml --- name: Shield frequencies parameters: - name: a value: 24.50 - name: β value: 42.10 meta: phase_inverted: true .. code-block:: python parameters = Parameters('data.yml') parameters.name #=> 'Shield frequencies' parameters.get_value('a') #=> 24.50 parameters.get_meta('β')['phase_inverted'] #=> true """ data = yaml.load(open(path, 'r')) for key in self.property_keys: if key in data: setattr(self, key, data[key]) self.parameters = self.parameter_list(data['parameters'])
python
def load_file(self, path): """Load a YAML file with parameter data and other metadata. :param path: Path to YAML file. :type path: str The data in the YAML file is used to set the properties for the instance. The YAML file must contain a ``parameters`` key. It may optionally include any keys defined in :attr:`.property_keys`. .. code-block:: yaml # data.yml --- name: Shield frequencies parameters: - name: a value: 24.50 - name: β value: 42.10 meta: phase_inverted: true .. code-block:: python parameters = Parameters('data.yml') parameters.name #=> 'Shield frequencies' parameters.get_value('a') #=> 24.50 parameters.get_meta('β')['phase_inverted'] #=> true """ data = yaml.load(open(path, 'r')) for key in self.property_keys: if key in data: setattr(self, key, data[key]) self.parameters = self.parameter_list(data['parameters'])
[ "def", "load_file", "(", "self", ",", "path", ")", ":", "data", "=", "yaml", ".", "load", "(", "open", "(", "path", ",", "'r'", ")", ")", "for", "key", "in", "self", ".", "property_keys", ":", "if", "key", "in", "data", ":", "setattr", "(", "self", ",", "key", ",", "data", "[", "key", "]", ")", "self", ".", "parameters", "=", "self", ".", "parameter_list", "(", "data", "[", "'parameters'", "]", ")" ]
Load a YAML file with parameter data and other metadata. :param path: Path to YAML file. :type path: str The data in the YAML file is used to set the properties for the instance. The YAML file must contain a ``parameters`` key. It may optionally include any keys defined in :attr:`.property_keys`. .. code-block:: yaml # data.yml --- name: Shield frequencies parameters: - name: a value: 24.50 - name: β value: 42.10 meta: phase_inverted: true .. code-block:: python parameters = Parameters('data.yml') parameters.name #=> 'Shield frequencies' parameters.get_value('a') #=> 24.50 parameters.get_meta('β')['phase_inverted'] #=> true
[ "Load", "a", "YAML", "file", "with", "parameter", "data", "and", "other", "metadata", "." ]
0fa1995a3a328b679c9926f73239d0ecdc6e5d3d
https://github.com/razor-x/dichalcogenides/blob/0fa1995a3a328b679c9926f73239d0ecdc6e5d3d/dichalcogenides/parameters/parameters.py#L126-L160
245,860
razor-x/dichalcogenides
dichalcogenides/parameters/parameters.py
Parameters.parameter_list
def parameter_list(data): """Create a list of parameter objects from a dict. :param data: Dictionary to convert to parameter list. :type data: dict :return: Parameter list. :rtype: dict """ items = [] for item in data: param = Parameter(item['name'], item['value']) if 'meta' in item: param.meta = item['meta'] items.append(param) return items
python
def parameter_list(data): """Create a list of parameter objects from a dict. :param data: Dictionary to convert to parameter list. :type data: dict :return: Parameter list. :rtype: dict """ items = [] for item in data: param = Parameter(item['name'], item['value']) if 'meta' in item: param.meta = item['meta'] items.append(param) return items
[ "def", "parameter_list", "(", "data", ")", ":", "items", "=", "[", "]", "for", "item", "in", "data", ":", "param", "=", "Parameter", "(", "item", "[", "'name'", "]", ",", "item", "[", "'value'", "]", ")", "if", "'meta'", "in", "item", ":", "param", ".", "meta", "=", "item", "[", "'meta'", "]", "items", ".", "append", "(", "param", ")", "return", "items" ]
Create a list of parameter objects from a dict. :param data: Dictionary to convert to parameter list. :type data: dict :return: Parameter list. :rtype: dict
[ "Create", "a", "list", "of", "parameter", "objects", "from", "a", "dict", "." ]
0fa1995a3a328b679c9926f73239d0ecdc6e5d3d
https://github.com/razor-x/dichalcogenides/blob/0fa1995a3a328b679c9926f73239d0ecdc6e5d3d/dichalcogenides/parameters/parameters.py#L163-L177
245,861
artisanofcode/python-broadway
broadway/app.py
Factory.add_blueprint
def add_blueprint(self, blueprint, **options): """ Specify a blueprint to be registered with the application. Additional options will be passed to :meth:`~Flask.register_blueprint` when the application is created. .. code-block:: python factory = Factory() factory.add_blueprint('myapp.views:blueprint', url_prefix='/foo') :param blueprint: import path to blueprint object :type blueprint: str :param options: options to pass to the blueprint :type options: dict """ instance = werkzeug.utils.import_string(blueprint) self._blueprints.append((instance, options))
python
def add_blueprint(self, blueprint, **options): """ Specify a blueprint to be registered with the application. Additional options will be passed to :meth:`~Flask.register_blueprint` when the application is created. .. code-block:: python factory = Factory() factory.add_blueprint('myapp.views:blueprint', url_prefix='/foo') :param blueprint: import path to blueprint object :type blueprint: str :param options: options to pass to the blueprint :type options: dict """ instance = werkzeug.utils.import_string(blueprint) self._blueprints.append((instance, options))
[ "def", "add_blueprint", "(", "self", ",", "blueprint", ",", "*", "*", "options", ")", ":", "instance", "=", "werkzeug", ".", "utils", ".", "import_string", "(", "blueprint", ")", "self", ".", "_blueprints", ".", "append", "(", "(", "instance", ",", "options", ")", ")" ]
Specify a blueprint to be registered with the application. Additional options will be passed to :meth:`~Flask.register_blueprint` when the application is created. .. code-block:: python factory = Factory() factory.add_blueprint('myapp.views:blueprint', url_prefix='/foo') :param blueprint: import path to blueprint object :type blueprint: str :param options: options to pass to the blueprint :type options: dict
[ "Specify", "a", "blueprint", "to", "be", "registered", "with", "the", "application", "." ]
a051ca5a922ecb38a541df59e8740e2a047d9a4a
https://github.com/artisanofcode/python-broadway/blob/a051ca5a922ecb38a541df59e8740e2a047d9a4a/broadway/app.py#L85-L104
245,862
artisanofcode/python-broadway
broadway/app.py
Factory.add_extension
def add_extension(self, extension): """ Specify a broadway extension to initialise .. code-block:: python factory = Factory() factory.add_extension('broadway_sqlalchemy') :param extension: import path to extension :type extension: str """ instance = werkzeug.utils.import_string(extension) if hasattr(instance, 'register'): instance.register(self) self._extensions.append(instance)
python
def add_extension(self, extension): """ Specify a broadway extension to initialise .. code-block:: python factory = Factory() factory.add_extension('broadway_sqlalchemy') :param extension: import path to extension :type extension: str """ instance = werkzeug.utils.import_string(extension) if hasattr(instance, 'register'): instance.register(self) self._extensions.append(instance)
[ "def", "add_extension", "(", "self", ",", "extension", ")", ":", "instance", "=", "werkzeug", ".", "utils", ".", "import_string", "(", "extension", ")", "if", "hasattr", "(", "instance", ",", "'register'", ")", ":", "instance", ".", "register", "(", "self", ")", "self", ".", "_extensions", ".", "append", "(", "instance", ")" ]
Specify a broadway extension to initialise .. code-block:: python factory = Factory() factory.add_extension('broadway_sqlalchemy') :param extension: import path to extension :type extension: str
[ "Specify", "a", "broadway", "extension", "to", "initialise" ]
a051ca5a922ecb38a541df59e8740e2a047d9a4a
https://github.com/artisanofcode/python-broadway/blob/a051ca5a922ecb38a541df59e8740e2a047d9a4a/broadway/app.py#L128-L146
245,863
MacHu-GWU/windtalker-project
windtalker/asymmetric.py
AsymmetricCipher.newkeys
def newkeys(nbits=1024): """ Create a new pair of public and private key pair to use. """ pubkey, privkey = rsa.newkeys(nbits, poolsize=1) return pubkey, privkey
python
def newkeys(nbits=1024): """ Create a new pair of public and private key pair to use. """ pubkey, privkey = rsa.newkeys(nbits, poolsize=1) return pubkey, privkey
[ "def", "newkeys", "(", "nbits", "=", "1024", ")", ":", "pubkey", ",", "privkey", "=", "rsa", ".", "newkeys", "(", "nbits", ",", "poolsize", "=", "1", ")", "return", "pubkey", ",", "privkey" ]
Create a new pair of public and private key pair to use.
[ "Create", "a", "new", "pair", "of", "public", "and", "private", "key", "pair", "to", "use", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/asymmetric.py#L48-L53
245,864
MacHu-GWU/windtalker-project
windtalker/asymmetric.py
AsymmetricCipher.encrypt
def encrypt(self, binary, use_sign=True): """ Encrypt binary data. **中文文档** - 发送消息时只需要对方的pubkey - 如需使用签名, 则双方都需要持有对方的pubkey """ token = rsa.encrypt(binary, self.his_pubkey) # encrypt it if use_sign: self.sign = rsa.sign(binary, self.my_privkey, "SHA-1") # sign it return token
python
def encrypt(self, binary, use_sign=True): """ Encrypt binary data. **中文文档** - 发送消息时只需要对方的pubkey - 如需使用签名, 则双方都需要持有对方的pubkey """ token = rsa.encrypt(binary, self.his_pubkey) # encrypt it if use_sign: self.sign = rsa.sign(binary, self.my_privkey, "SHA-1") # sign it return token
[ "def", "encrypt", "(", "self", ",", "binary", ",", "use_sign", "=", "True", ")", ":", "token", "=", "rsa", ".", "encrypt", "(", "binary", ",", "self", ".", "his_pubkey", ")", "# encrypt it", "if", "use_sign", ":", "self", ".", "sign", "=", "rsa", ".", "sign", "(", "binary", ",", "self", ".", "my_privkey", ",", "\"SHA-1\"", ")", "# sign it", "return", "token" ]
Encrypt binary data. **中文文档** - 发送消息时只需要对方的pubkey - 如需使用签名, 则双方都需要持有对方的pubkey
[ "Encrypt", "binary", "data", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/asymmetric.py#L55-L67
245,865
MacHu-GWU/windtalker-project
windtalker/asymmetric.py
AsymmetricCipher.decrypt
def decrypt(self, token, signature=None): """ Decrypt binary data. **中文文档** - 接收消息时只需要自己的privkey - 如需使用签名, 则双方都需要持有对方的pubkey """ binary = rsa.decrypt(token, self.my_privkey) if signature: rsa.verify(binary, signature, self.his_pubkey) return binary
python
def decrypt(self, token, signature=None): """ Decrypt binary data. **中文文档** - 接收消息时只需要自己的privkey - 如需使用签名, 则双方都需要持有对方的pubkey """ binary = rsa.decrypt(token, self.my_privkey) if signature: rsa.verify(binary, signature, self.his_pubkey) return binary
[ "def", "decrypt", "(", "self", ",", "token", ",", "signature", "=", "None", ")", ":", "binary", "=", "rsa", ".", "decrypt", "(", "token", ",", "self", ".", "my_privkey", ")", "if", "signature", ":", "rsa", ".", "verify", "(", "binary", ",", "signature", ",", "self", ".", "his_pubkey", ")", "return", "binary" ]
Decrypt binary data. **中文文档** - 接收消息时只需要自己的privkey - 如需使用签名, 则双方都需要持有对方的pubkey
[ "Decrypt", "binary", "data", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/asymmetric.py#L69-L81
245,866
MacHu-GWU/windtalker-project
windtalker/asymmetric.py
AsymmetricCipher.encrypt_file
def encrypt_file(self, path, output_path=None, overwrite=False, enable_verbose=True): """ Encrypt a file using rsa. RSA for big file encryption is very slow. For big file, I recommend to use symmetric encryption and use RSA to encrypt the password. """ path, output_path = files.process_dst_overwrite_args( src=path, dst=output_path, overwrite=overwrite, src_to_dst_func=files.get_encrpyted_path, ) with open(path, "rb") as infile, open(output_path, "wb") as outfile: encrypt_bigfile(infile, outfile, self.his_pubkey)
python
def encrypt_file(self, path, output_path=None, overwrite=False, enable_verbose=True): """ Encrypt a file using rsa. RSA for big file encryption is very slow. For big file, I recommend to use symmetric encryption and use RSA to encrypt the password. """ path, output_path = files.process_dst_overwrite_args( src=path, dst=output_path, overwrite=overwrite, src_to_dst_func=files.get_encrpyted_path, ) with open(path, "rb") as infile, open(output_path, "wb") as outfile: encrypt_bigfile(infile, outfile, self.his_pubkey)
[ "def", "encrypt_file", "(", "self", ",", "path", ",", "output_path", "=", "None", ",", "overwrite", "=", "False", ",", "enable_verbose", "=", "True", ")", ":", "path", ",", "output_path", "=", "files", ".", "process_dst_overwrite_args", "(", "src", "=", "path", ",", "dst", "=", "output_path", ",", "overwrite", "=", "overwrite", ",", "src_to_dst_func", "=", "files", ".", "get_encrpyted_path", ",", ")", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "infile", ",", "open", "(", "output_path", ",", "\"wb\"", ")", "as", "outfile", ":", "encrypt_bigfile", "(", "infile", ",", "outfile", ",", "self", ".", "his_pubkey", ")" ]
Encrypt a file using rsa. RSA for big file encryption is very slow. For big file, I recommend to use symmetric encryption and use RSA to encrypt the password.
[ "Encrypt", "a", "file", "using", "rsa", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/asymmetric.py#L83-L100
245,867
MacHu-GWU/windtalker-project
windtalker/asymmetric.py
AsymmetricCipher.decrypt_file
def decrypt_file(self, path, output_path=None, overwrite=False, enable_verbose=True): """ Decrypt a file using rsa. """ path, output_path = files.process_dst_overwrite_args( src=path, dst=output_path, overwrite=overwrite, src_to_dst_func=files.get_decrpyted_path, ) with open(path, "rb") as infile, open(output_path, "wb") as outfile: decrypt_bigfile(infile, outfile, self.my_privkey)
python
def decrypt_file(self, path, output_path=None, overwrite=False, enable_verbose=True): """ Decrypt a file using rsa. """ path, output_path = files.process_dst_overwrite_args( src=path, dst=output_path, overwrite=overwrite, src_to_dst_func=files.get_decrpyted_path, ) with open(path, "rb") as infile, open(output_path, "wb") as outfile: decrypt_bigfile(infile, outfile, self.my_privkey)
[ "def", "decrypt_file", "(", "self", ",", "path", ",", "output_path", "=", "None", ",", "overwrite", "=", "False", ",", "enable_verbose", "=", "True", ")", ":", "path", ",", "output_path", "=", "files", ".", "process_dst_overwrite_args", "(", "src", "=", "path", ",", "dst", "=", "output_path", ",", "overwrite", "=", "overwrite", ",", "src_to_dst_func", "=", "files", ".", "get_decrpyted_path", ",", ")", "with", "open", "(", "path", ",", "\"rb\"", ")", "as", "infile", ",", "open", "(", "output_path", ",", "\"wb\"", ")", "as", "outfile", ":", "decrypt_bigfile", "(", "infile", ",", "outfile", ",", "self", ".", "my_privkey", ")" ]
Decrypt a file using rsa.
[ "Decrypt", "a", "file", "using", "rsa", "." ]
1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce
https://github.com/MacHu-GWU/windtalker-project/blob/1dcff7c3692d5883cf1b55d1ea745723cfc6c3ce/windtalker/asymmetric.py#L102-L116
245,868
townsenddw/jhubctl
jhubctl/main.py
JhubctlApp.print_subcommands
def print_subcommands(self): """Print the subcommand part of the help.""" lines = ["Call"] lines.append('-'*len(lines[-1])) lines.append('') lines.append("> jhubctl <subcommand> <resource-type> <resource-name>") lines.append('') lines.append("Subcommands") lines.append('-'*len(lines[-1])) lines.append('') for name, subcommand in self.subcommands.items(): lines.append(name) lines.append(indent(subcommand[1])) lines.append('') print(os.linesep.join(lines))
python
def print_subcommands(self): """Print the subcommand part of the help.""" lines = ["Call"] lines.append('-'*len(lines[-1])) lines.append('') lines.append("> jhubctl <subcommand> <resource-type> <resource-name>") lines.append('') lines.append("Subcommands") lines.append('-'*len(lines[-1])) lines.append('') for name, subcommand in self.subcommands.items(): lines.append(name) lines.append(indent(subcommand[1])) lines.append('') print(os.linesep.join(lines))
[ "def", "print_subcommands", "(", "self", ")", ":", "lines", "=", "[", "\"Call\"", "]", "lines", ".", "append", "(", "'-'", "*", "len", "(", "lines", "[", "-", "1", "]", ")", ")", "lines", ".", "append", "(", "''", ")", "lines", ".", "append", "(", "\"> jhubctl <subcommand> <resource-type> <resource-name>\"", ")", "lines", ".", "append", "(", "''", ")", "lines", ".", "append", "(", "\"Subcommands\"", ")", "lines", ".", "append", "(", "'-'", "*", "len", "(", "lines", "[", "-", "1", "]", ")", ")", "lines", ".", "append", "(", "''", ")", "for", "name", ",", "subcommand", "in", "self", ".", "subcommands", ".", "items", "(", ")", ":", "lines", ".", "append", "(", "name", ")", "lines", ".", "append", "(", "indent", "(", "subcommand", "[", "1", "]", ")", ")", "lines", ".", "append", "(", "''", ")", "print", "(", "os", ".", "linesep", ".", "join", "(", "lines", ")", ")" ]
Print the subcommand part of the help.
[ "Print", "the", "subcommand", "part", "of", "the", "help", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/main.py#L112-L128
245,869
townsenddw/jhubctl
jhubctl/main.py
JhubctlApp.parse_command_line
def parse_command_line(self, argv=None): """Parse the jhubctl command line arguments. This overwrites traitlets' default `parse_command_line` method and tailors it to jhubctl's needs. """ argv = sys.argv[1:] if argv is None else argv self.argv = [py3compat.cast_unicode(arg) for arg in argv] # Append Provider Class to the list of configurable items. ProviderClass = getattr(providers, self.provider_type) self.classes.append(ProviderClass) if any(x in self.argv for x in ('-h', '--help-all', '--help')): self.print_help('--help-all' in self.argv) self.exit(0) if '--version' in self.argv or '-V' in self.argv: self.print_version() self.exit(0) # Generate a configuration file if flag is given. if '--generate-config' in self.argv: conf = self.generate_config_file() with open(self.config_file, 'w') as f: f.write(conf) self.exit(0) # If not config, parse commands. ## Run sanity checks. # Check that the minimum number of arguments have been called. if len(self.argv) < 2: raise JhubctlError( "Not enough arguments. \n\n" "Expected: jhubctl <action> <resource> <name>") # Check action self.resource_action = self.argv[0] if self.resource_action not in self.subcommands: raise JhubctlError( f"Subcommand is not recognized; must be one of these: {self.subcommands}") # Check resource self.resource_type = self.argv[1] if self.resource_type not in self.resources: raise JhubctlError( f"First argument after a subcommand must one of these" f"resources: {self.resources}" ) # Get name of resource. try: self.resource_name = self.argv[2] except IndexError: if self.resource_action != "get": raise JhubctlError( "Not enough arguments. 
\n\n" "Expected: jhubctl <action> <resource> <name>") else: self.resource_name = None # flatten flags&aliases, so cl-args get appropriate priority: flags, aliases = self.flatten_flags() loader = KVArgParseConfigLoader(argv=argv, aliases=aliases, flags=flags, log=self.log) config = loader.load_config() self.update_config(config) # store unparsed args in extra_args self.extra_args = loader.extra_args
python
def parse_command_line(self, argv=None): """Parse the jhubctl command line arguments. This overwrites traitlets' default `parse_command_line` method and tailors it to jhubctl's needs. """ argv = sys.argv[1:] if argv is None else argv self.argv = [py3compat.cast_unicode(arg) for arg in argv] # Append Provider Class to the list of configurable items. ProviderClass = getattr(providers, self.provider_type) self.classes.append(ProviderClass) if any(x in self.argv for x in ('-h', '--help-all', '--help')): self.print_help('--help-all' in self.argv) self.exit(0) if '--version' in self.argv or '-V' in self.argv: self.print_version() self.exit(0) # Generate a configuration file if flag is given. if '--generate-config' in self.argv: conf = self.generate_config_file() with open(self.config_file, 'w') as f: f.write(conf) self.exit(0) # If not config, parse commands. ## Run sanity checks. # Check that the minimum number of arguments have been called. if len(self.argv) < 2: raise JhubctlError( "Not enough arguments. \n\n" "Expected: jhubctl <action> <resource> <name>") # Check action self.resource_action = self.argv[0] if self.resource_action not in self.subcommands: raise JhubctlError( f"Subcommand is not recognized; must be one of these: {self.subcommands}") # Check resource self.resource_type = self.argv[1] if self.resource_type not in self.resources: raise JhubctlError( f"First argument after a subcommand must one of these" f"resources: {self.resources}" ) # Get name of resource. try: self.resource_name = self.argv[2] except IndexError: if self.resource_action != "get": raise JhubctlError( "Not enough arguments. 
\n\n" "Expected: jhubctl <action> <resource> <name>") else: self.resource_name = None # flatten flags&aliases, so cl-args get appropriate priority: flags, aliases = self.flatten_flags() loader = KVArgParseConfigLoader(argv=argv, aliases=aliases, flags=flags, log=self.log) config = loader.load_config() self.update_config(config) # store unparsed args in extra_args self.extra_args = loader.extra_args
[ "def", "parse_command_line", "(", "self", ",", "argv", "=", "None", ")", ":", "argv", "=", "sys", ".", "argv", "[", "1", ":", "]", "if", "argv", "is", "None", "else", "argv", "self", ".", "argv", "=", "[", "py3compat", ".", "cast_unicode", "(", "arg", ")", "for", "arg", "in", "argv", "]", "# Append Provider Class to the list of configurable items.", "ProviderClass", "=", "getattr", "(", "providers", ",", "self", ".", "provider_type", ")", "self", ".", "classes", ".", "append", "(", "ProviderClass", ")", "if", "any", "(", "x", "in", "self", ".", "argv", "for", "x", "in", "(", "'-h'", ",", "'--help-all'", ",", "'--help'", ")", ")", ":", "self", ".", "print_help", "(", "'--help-all'", "in", "self", ".", "argv", ")", "self", ".", "exit", "(", "0", ")", "if", "'--version'", "in", "self", ".", "argv", "or", "'-V'", "in", "self", ".", "argv", ":", "self", ".", "print_version", "(", ")", "self", ".", "exit", "(", "0", ")", "# Generate a configuration file if flag is given.", "if", "'--generate-config'", "in", "self", ".", "argv", ":", "conf", "=", "self", ".", "generate_config_file", "(", ")", "with", "open", "(", "self", ".", "config_file", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "conf", ")", "self", ".", "exit", "(", "0", ")", "# If not config, parse commands.", "## Run sanity checks.", "# Check that the minimum number of arguments have been called.", "if", "len", "(", "self", ".", "argv", ")", "<", "2", ":", "raise", "JhubctlError", "(", "\"Not enough arguments. 
\\n\\n\"", "\"Expected: jhubctl <action> <resource> <name>\"", ")", "# Check action", "self", ".", "resource_action", "=", "self", ".", "argv", "[", "0", "]", "if", "self", ".", "resource_action", "not", "in", "self", ".", "subcommands", ":", "raise", "JhubctlError", "(", "f\"Subcommand is not recognized; must be one of these: {self.subcommands}\"", ")", "# Check resource", "self", ".", "resource_type", "=", "self", ".", "argv", "[", "1", "]", "if", "self", ".", "resource_type", "not", "in", "self", ".", "resources", ":", "raise", "JhubctlError", "(", "f\"First argument after a subcommand must one of these\"", "f\"resources: {self.resources}\"", ")", "# Get name of resource.", "try", ":", "self", ".", "resource_name", "=", "self", ".", "argv", "[", "2", "]", "except", "IndexError", ":", "if", "self", ".", "resource_action", "!=", "\"get\"", ":", "raise", "JhubctlError", "(", "\"Not enough arguments. \\n\\n\"", "\"Expected: jhubctl <action> <resource> <name>\"", ")", "else", ":", "self", ".", "resource_name", "=", "None", "# flatten flags&aliases, so cl-args get appropriate priority:", "flags", ",", "aliases", "=", "self", ".", "flatten_flags", "(", ")", "loader", "=", "KVArgParseConfigLoader", "(", "argv", "=", "argv", ",", "aliases", "=", "aliases", ",", "flags", "=", "flags", ",", "log", "=", "self", ".", "log", ")", "config", "=", "loader", ".", "load_config", "(", ")", "self", ".", "update_config", "(", "config", ")", "# store unparsed args in extra_args", "self", ".", "extra_args", "=", "loader", ".", "extra_args" ]
Parse the jhubctl command line arguments. This overwrites traitlets' default `parse_command_line` method and tailors it to jhubctl's needs.
[ "Parse", "the", "jhubctl", "command", "line", "arguments", ".", "This", "overwrites", "traitlets", "default", "parse_command_line", "method", "and", "tailors", "it", "to", "jhubctl", "s", "needs", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/main.py#L131-L199
245,870
townsenddw/jhubctl
jhubctl/main.py
JhubctlApp.initialize
def initialize(self, argv=None): """Handle specific configurations.""" # Parse configuration items on command line. self.parse_command_line(argv) if self.config_file: self.load_config_file(self.config_file) # Initialize objects to interact with. self.kubeconf = KubeConf() self.cluster_list = ClusterList(kubeconf=self.kubeconf) self.hub_list = HubList(kubeconf=self.kubeconf)
python
def initialize(self, argv=None): """Handle specific configurations.""" # Parse configuration items on command line. self.parse_command_line(argv) if self.config_file: self.load_config_file(self.config_file) # Initialize objects to interact with. self.kubeconf = KubeConf() self.cluster_list = ClusterList(kubeconf=self.kubeconf) self.hub_list = HubList(kubeconf=self.kubeconf)
[ "def", "initialize", "(", "self", ",", "argv", "=", "None", ")", ":", "# Parse configuration items on command line.", "self", ".", "parse_command_line", "(", "argv", ")", "if", "self", ".", "config_file", ":", "self", ".", "load_config_file", "(", "self", ".", "config_file", ")", "# Initialize objects to interact with.", "self", ".", "kubeconf", "=", "KubeConf", "(", ")", "self", ".", "cluster_list", "=", "ClusterList", "(", "kubeconf", "=", "self", ".", "kubeconf", ")", "self", ".", "hub_list", "=", "HubList", "(", "kubeconf", "=", "self", ".", "kubeconf", ")" ]
Handle specific configurations.
[ "Handle", "specific", "configurations", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/main.py#L201-L211
245,871
townsenddw/jhubctl
jhubctl/main.py
JhubctlApp.start
def start(self): """Execution happening on jhubctl.""" # Get specified resource. resource_list = getattr(self, f'{self.resource_type}_list') resource_action = getattr(resource_list, self.resource_action) resource_action(self.resource_name)
python
def start(self): """Execution happening on jhubctl.""" # Get specified resource. resource_list = getattr(self, f'{self.resource_type}_list') resource_action = getattr(resource_list, self.resource_action) resource_action(self.resource_name)
[ "def", "start", "(", "self", ")", ":", "# Get specified resource.", "resource_list", "=", "getattr", "(", "self", ",", "f'{self.resource_type}_list'", ")", "resource_action", "=", "getattr", "(", "resource_list", ",", "self", ".", "resource_action", ")", "resource_action", "(", "self", ".", "resource_name", ")" ]
Execution happening on jhubctl.
[ "Execution", "happening", "on", "jhubctl", "." ]
c8c20f86a16e9d01dd90e4607d81423417cc773b
https://github.com/townsenddw/jhubctl/blob/c8c20f86a16e9d01dd90e4607d81423417cc773b/jhubctl/main.py#L213-L218
245,872
TheOneHyer/arandomness
build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py
OmniTree.add_children
def add_children(self, children): """Adds new children nodes after filtering for duplicates Args: children (list): list of OmniTree nodes to add as children """ self._children += [c for c in children if c not in self._children]
python
def add_children(self, children): """Adds new children nodes after filtering for duplicates Args: children (list): list of OmniTree nodes to add as children """ self._children += [c for c in children if c not in self._children]
[ "def", "add_children", "(", "self", ",", "children", ")", ":", "self", ".", "_children", "+=", "[", "c", "for", "c", "in", "children", "if", "c", "not", "in", "self", ".", "_children", "]" ]
Adds new children nodes after filtering for duplicates Args: children (list): list of OmniTree nodes to add as children
[ "Adds", "new", "children", "nodes", "after", "filtering", "for", "duplicates" ]
ae9f630e9a1d67b0eb6d61644a49756de8a5268c
https://github.com/TheOneHyer/arandomness/blob/ae9f630e9a1d67b0eb6d61644a49756de8a5268c/build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py#L57-L64
245,873
TheOneHyer/arandomness
build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py
OmniTree.add_parents
def add_parents(self, parents): """Adds new parent nodes after filtering for duplicates Args: parents (list): list of OmniTree nodes to add as parents """ self._parents += [p for p in parents if p not in self._parents]
python
def add_parents(self, parents): """Adds new parent nodes after filtering for duplicates Args: parents (list): list of OmniTree nodes to add as parents """ self._parents += [p for p in parents if p not in self._parents]
[ "def", "add_parents", "(", "self", ",", "parents", ")", ":", "self", ".", "_parents", "+=", "[", "p", "for", "p", "in", "parents", "if", "p", "not", "in", "self", ".", "_parents", "]" ]
Adds new parent nodes after filtering for duplicates Args: parents (list): list of OmniTree nodes to add as parents
[ "Adds", "new", "parent", "nodes", "after", "filtering", "for", "duplicates" ]
ae9f630e9a1d67b0eb6d61644a49756de8a5268c
https://github.com/TheOneHyer/arandomness/blob/ae9f630e9a1d67b0eb6d61644a49756de8a5268c/build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py#L66-L73
245,874
TheOneHyer/arandomness
build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py
OmniTree.find_loops
def find_loops(self, _path=None): """Crappy function that finds a single loop in the tree""" if _path is None: _path = [] if self in _path: return _path + [self] elif self._children == []: return None else: for child in self._children: return child.find_loops(_path + [self])
python
def find_loops(self, _path=None): """Crappy function that finds a single loop in the tree""" if _path is None: _path = [] if self in _path: return _path + [self] elif self._children == []: return None else: for child in self._children: return child.find_loops(_path + [self])
[ "def", "find_loops", "(", "self", ",", "_path", "=", "None", ")", ":", "if", "_path", "is", "None", ":", "_path", "=", "[", "]", "if", "self", "in", "_path", ":", "return", "_path", "+", "[", "self", "]", "elif", "self", ".", "_children", "==", "[", "]", ":", "return", "None", "else", ":", "for", "child", "in", "self", ".", "_children", ":", "return", "child", ".", "find_loops", "(", "_path", "+", "[", "self", "]", ")" ]
Crappy function that finds a single loop in the tree
[ "Crappy", "function", "that", "finds", "a", "single", "loop", "in", "the", "tree" ]
ae9f630e9a1d67b0eb6d61644a49756de8a5268c
https://github.com/TheOneHyer/arandomness/blob/ae9f630e9a1d67b0eb6d61644a49756de8a5268c/build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py#L75-L87
245,875
TheOneHyer/arandomness
build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py
OmniTree.find_branches
def find_branches(self, labels=False, unique=False): """Recursively constructs a list of pointers of the tree's structure Args: labels (bool): If True, returned lists consist of node labels. If False (default), lists consist of node pointers. This option is mostly intended for debugging purposes. unique (bool): If True, return lists of all unique, linear branches of the tree. More accurately, it returns a list of lists where each list contains a single, unique, linear path from the calling node to the tree's leaf nodes. If False (default), a highly-nested list is returned where each nested list represents a branch point in the tree. See Examples for more. Examples: >>> from arandomness.trees import OmniTree >>> a = OmniTree(label='a') >>> b = OmniTree(label='b', parents=[a]) >>> c = OmniTree(label='c', parents=[b]) >>> d = OmniTree(label='d', parents=[b]) >>> e = OmniTree(label='e', parents=[c, d]) >>> a.find_branches(labels=True) ['a', ['b', ['c', ['e']], ['d', ['e']]]] >>> a.find_branches(labels=True, unique=True) [['a', 'b', 'c', 'e'], ['a', 'b', 'd', 'e']] """ branches = [] # Assign proper item, pointer or label, to return if labels is True: identifier = [self.label] else: identifier = [self] if self._children == []: # Base Case: current node is a leaf/end node return identifier else: # Recursive Case: all other nodes for child in self._children: if unique is True: for branch in child.find_branches(labels=labels, unique=True): # I don't know why this 'if' is necessary, but it is if type(branch) is not list: branch = list(branch) branches.append(identifier + branch) else: branches.append(child.find_branches(labels=labels)) # Proper construction of list depends on 'unique' if unique is True: return branches else: return identifier + branches
python
def find_branches(self, labels=False, unique=False): """Recursively constructs a list of pointers of the tree's structure Args: labels (bool): If True, returned lists consist of node labels. If False (default), lists consist of node pointers. This option is mostly intended for debugging purposes. unique (bool): If True, return lists of all unique, linear branches of the tree. More accurately, it returns a list of lists where each list contains a single, unique, linear path from the calling node to the tree's leaf nodes. If False (default), a highly-nested list is returned where each nested list represents a branch point in the tree. See Examples for more. Examples: >>> from arandomness.trees import OmniTree >>> a = OmniTree(label='a') >>> b = OmniTree(label='b', parents=[a]) >>> c = OmniTree(label='c', parents=[b]) >>> d = OmniTree(label='d', parents=[b]) >>> e = OmniTree(label='e', parents=[c, d]) >>> a.find_branches(labels=True) ['a', ['b', ['c', ['e']], ['d', ['e']]]] >>> a.find_branches(labels=True, unique=True) [['a', 'b', 'c', 'e'], ['a', 'b', 'd', 'e']] """ branches = [] # Assign proper item, pointer or label, to return if labels is True: identifier = [self.label] else: identifier = [self] if self._children == []: # Base Case: current node is a leaf/end node return identifier else: # Recursive Case: all other nodes for child in self._children: if unique is True: for branch in child.find_branches(labels=labels, unique=True): # I don't know why this 'if' is necessary, but it is if type(branch) is not list: branch = list(branch) branches.append(identifier + branch) else: branches.append(child.find_branches(labels=labels)) # Proper construction of list depends on 'unique' if unique is True: return branches else: return identifier + branches
[ "def", "find_branches", "(", "self", ",", "labels", "=", "False", ",", "unique", "=", "False", ")", ":", "branches", "=", "[", "]", "# Assign proper item, pointer or label, to return", "if", "labels", "is", "True", ":", "identifier", "=", "[", "self", ".", "label", "]", "else", ":", "identifier", "=", "[", "self", "]", "if", "self", ".", "_children", "==", "[", "]", ":", "# Base Case: current node is a leaf/end node", "return", "identifier", "else", ":", "# Recursive Case: all other nodes", "for", "child", "in", "self", ".", "_children", ":", "if", "unique", "is", "True", ":", "for", "branch", "in", "child", ".", "find_branches", "(", "labels", "=", "labels", ",", "unique", "=", "True", ")", ":", "# I don't know why this 'if' is necessary, but it is", "if", "type", "(", "branch", ")", "is", "not", "list", ":", "branch", "=", "list", "(", "branch", ")", "branches", ".", "append", "(", "identifier", "+", "branch", ")", "else", ":", "branches", ".", "append", "(", "child", ".", "find_branches", "(", "labels", "=", "labels", ")", ")", "# Proper construction of list depends on 'unique'", "if", "unique", "is", "True", ":", "return", "branches", "else", ":", "return", "identifier", "+", "branches" ]
Recursively constructs a list of pointers of the tree's structure Args: labels (bool): If True, returned lists consist of node labels. If False (default), lists consist of node pointers. This option is mostly intended for debugging purposes. unique (bool): If True, return lists of all unique, linear branches of the tree. More accurately, it returns a list of lists where each list contains a single, unique, linear path from the calling node to the tree's leaf nodes. If False (default), a highly-nested list is returned where each nested list represents a branch point in the tree. See Examples for more. Examples: >>> from arandomness.trees import OmniTree >>> a = OmniTree(label='a') >>> b = OmniTree(label='b', parents=[a]) >>> c = OmniTree(label='c', parents=[b]) >>> d = OmniTree(label='d', parents=[b]) >>> e = OmniTree(label='e', parents=[c, d]) >>> a.find_branches(labels=True) ['a', ['b', ['c', ['e']], ['d', ['e']]]] >>> a.find_branches(labels=True, unique=True) [['a', 'b', 'c', 'e'], ['a', 'b', 'd', 'e']]
[ "Recursively", "constructs", "a", "list", "of", "pointers", "of", "the", "tree", "s", "structure" ]
ae9f630e9a1d67b0eb6d61644a49756de8a5268c
https://github.com/TheOneHyer/arandomness/blob/ae9f630e9a1d67b0eb6d61644a49756de8a5268c/build/lib.linux-x86_64-3.6/arandomness/trees/omnitree.py#L89-L147
245,876
exekias/droplet
droplet/samba/module.py
Samba.install
def install(self): """ Installation procedure, it writes basic smb.conf and uses samba-tool to provision the domain """ domain_settings = DomainSettings.get() with root(): if os.path.exists(self.SMBCONF_FILE): os.remove(self.SMBCONF_FILE) if domain_settings.mode == 'ad': domain_settings.adminpass = make_password(15) domain_settings.save() run("samba-tool domain provision " "--domain='%s' " "--workgroup='%s' " "--realm='%s' " "--use-xattrs=yes " "--use-rfc2307 " "--server-role='domain controller' " "--use-ntvfs " "--adminpass='%s'" % (domain_settings.domain, domain_settings.workgroup, domain_settings.realm, domain_settings.adminpass)) self.smbconf.write() shutil.copy2(self.SMB_KRB5CONF_FILE, self.KRB5CONF_FILE) # XXX FIXME move this to network run("echo 'nameserver 127.0.0.1' > /etc/resolv.conf") # TODO manage shares run("touch /etc/samba/shares.conf") elif domain_settings.mode == 'member': # TODO pass
python
def install(self): """ Installation procedure, it writes basic smb.conf and uses samba-tool to provision the domain """ domain_settings = DomainSettings.get() with root(): if os.path.exists(self.SMBCONF_FILE): os.remove(self.SMBCONF_FILE) if domain_settings.mode == 'ad': domain_settings.adminpass = make_password(15) domain_settings.save() run("samba-tool domain provision " "--domain='%s' " "--workgroup='%s' " "--realm='%s' " "--use-xattrs=yes " "--use-rfc2307 " "--server-role='domain controller' " "--use-ntvfs " "--adminpass='%s'" % (domain_settings.domain, domain_settings.workgroup, domain_settings.realm, domain_settings.adminpass)) self.smbconf.write() shutil.copy2(self.SMB_KRB5CONF_FILE, self.KRB5CONF_FILE) # XXX FIXME move this to network run("echo 'nameserver 127.0.0.1' > /etc/resolv.conf") # TODO manage shares run("touch /etc/samba/shares.conf") elif domain_settings.mode == 'member': # TODO pass
[ "def", "install", "(", "self", ")", ":", "domain_settings", "=", "DomainSettings", ".", "get", "(", ")", "with", "root", "(", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "self", ".", "SMBCONF_FILE", ")", ":", "os", ".", "remove", "(", "self", ".", "SMBCONF_FILE", ")", "if", "domain_settings", ".", "mode", "==", "'ad'", ":", "domain_settings", ".", "adminpass", "=", "make_password", "(", "15", ")", "domain_settings", ".", "save", "(", ")", "run", "(", "\"samba-tool domain provision \"", "\"--domain='%s' \"", "\"--workgroup='%s' \"", "\"--realm='%s' \"", "\"--use-xattrs=yes \"", "\"--use-rfc2307 \"", "\"--server-role='domain controller' \"", "\"--use-ntvfs \"", "\"--adminpass='%s'\"", "%", "(", "domain_settings", ".", "domain", ",", "domain_settings", ".", "workgroup", ",", "domain_settings", ".", "realm", ",", "domain_settings", ".", "adminpass", ")", ")", "self", ".", "smbconf", ".", "write", "(", ")", "shutil", ".", "copy2", "(", "self", ".", "SMB_KRB5CONF_FILE", ",", "self", ".", "KRB5CONF_FILE", ")", "# XXX FIXME move this to network", "run", "(", "\"echo 'nameserver 127.0.0.1' > /etc/resolv.conf\"", ")", "# TODO manage shares", "run", "(", "\"touch /etc/samba/shares.conf\"", ")", "elif", "domain_settings", ".", "mode", "==", "'member'", ":", "# TODO", "pass" ]
Installation procedure, it writes basic smb.conf and uses samba-tool to provision the domain
[ "Installation", "procedure", "it", "writes", "basic", "smb", ".", "conf", "and", "uses", "samba", "-", "tool", "to", "provision", "the", "domain" ]
aeac573a2c1c4b774e99d5414a1c79b1bb734941
https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/samba/module.py#L42-L82
245,877
exekias/droplet
droplet/samba/module.py
Samba.stop_other_daemons
def stop_other_daemons(self): """ Stop services already provided by main samba daemon """ if self.smbd.running: self.smbd.stop() if self.nmbd.running: self.nmbd.stop()
python
def stop_other_daemons(self): """ Stop services already provided by main samba daemon """ if self.smbd.running: self.smbd.stop() if self.nmbd.running: self.nmbd.stop()
[ "def", "stop_other_daemons", "(", "self", ")", ":", "if", "self", ".", "smbd", ".", "running", ":", "self", ".", "smbd", ".", "stop", "(", ")", "if", "self", ".", "nmbd", ".", "running", ":", "self", ".", "nmbd", ".", "stop", "(", ")" ]
Stop services already provided by main samba daemon
[ "Stop", "services", "already", "provided", "by", "main", "samba", "daemon" ]
aeac573a2c1c4b774e99d5414a1c79b1bb734941
https://github.com/exekias/droplet/blob/aeac573a2c1c4b774e99d5414a1c79b1bb734941/droplet/samba/module.py#L94-L102
245,878
etcher-be/elib_config
elib_config/_setup.py
ELIBConfig.check
def check(cls): """ Verifies that all necessary values for the package to be used have been provided :raises: `elib_config._exc.IncompleteSetupError` """ attribs = [ 'app_version', 'app_name', 'config_file_path', 'config_sep_str', ] for attrib in attribs: if getattr(cls, attrib) == 'not_set': raise IncompleteSetupError(f'elib_config setup is incomplete; missing: {attrib}')
python
def check(cls): """ Verifies that all necessary values for the package to be used have been provided :raises: `elib_config._exc.IncompleteSetupError` """ attribs = [ 'app_version', 'app_name', 'config_file_path', 'config_sep_str', ] for attrib in attribs: if getattr(cls, attrib) == 'not_set': raise IncompleteSetupError(f'elib_config setup is incomplete; missing: {attrib}')
[ "def", "check", "(", "cls", ")", ":", "attribs", "=", "[", "'app_version'", ",", "'app_name'", ",", "'config_file_path'", ",", "'config_sep_str'", ",", "]", "for", "attrib", "in", "attribs", ":", "if", "getattr", "(", "cls", ",", "attrib", ")", "==", "'not_set'", ":", "raise", "IncompleteSetupError", "(", "f'elib_config setup is incomplete; missing: {attrib}'", ")" ]
Verifies that all necessary values for the package to be used have been provided :raises: `elib_config._exc.IncompleteSetupError`
[ "Verifies", "that", "all", "necessary", "values", "for", "the", "package", "to", "be", "used", "have", "been", "provided" ]
5d8c839e84d70126620ab0186dc1f717e5868bd0
https://github.com/etcher-be/elib_config/blob/5d8c839e84d70126620ab0186dc1f717e5868bd0/elib_config/_setup.py#L23-L37
245,879
etcher-be/elib_config
elib_config/_setup.py
ELIBConfig.setup
def setup( cls, app_version: str, app_name: str, config_file_path: str, config_sep_str: str, root_path: typing.Optional[typing.List[str]] = None, ): """ Configures elib_config in one fell swoop :param app_version: version of the application :param app_name:name of the application :param config_file_path: path to the config file to use :param config_sep_str: separator for config values paths :param root_path: list of strings that will be pre-pended to *all* config values paths (useful to setup a prefix for the whole app) """ cls.app_version = app_version cls.app_name = app_name cls.config_file_path = config_file_path cls.config_sep_str = config_sep_str cls.root_path = root_path
python
def setup( cls, app_version: str, app_name: str, config_file_path: str, config_sep_str: str, root_path: typing.Optional[typing.List[str]] = None, ): """ Configures elib_config in one fell swoop :param app_version: version of the application :param app_name:name of the application :param config_file_path: path to the config file to use :param config_sep_str: separator for config values paths :param root_path: list of strings that will be pre-pended to *all* config values paths (useful to setup a prefix for the whole app) """ cls.app_version = app_version cls.app_name = app_name cls.config_file_path = config_file_path cls.config_sep_str = config_sep_str cls.root_path = root_path
[ "def", "setup", "(", "cls", ",", "app_version", ":", "str", ",", "app_name", ":", "str", ",", "config_file_path", ":", "str", ",", "config_sep_str", ":", "str", ",", "root_path", ":", "typing", ".", "Optional", "[", "typing", ".", "List", "[", "str", "]", "]", "=", "None", ",", ")", ":", "cls", ".", "app_version", "=", "app_version", "cls", ".", "app_name", "=", "app_name", "cls", ".", "config_file_path", "=", "config_file_path", "cls", ".", "config_sep_str", "=", "config_sep_str", "cls", ".", "root_path", "=", "root_path" ]
Configures elib_config in one fell swoop :param app_version: version of the application :param app_name:name of the application :param config_file_path: path to the config file to use :param config_sep_str: separator for config values paths :param root_path: list of strings that will be pre-pended to *all* config values paths (useful to setup a prefix for the whole app)
[ "Configures", "elib_config", "in", "one", "fell", "swoop" ]
5d8c839e84d70126620ab0186dc1f717e5868bd0
https://github.com/etcher-be/elib_config/blob/5d8c839e84d70126620ab0186dc1f717e5868bd0/elib_config/_setup.py#L40-L62
245,880
walidsa3d/baklava
baklava/providers/searchfactory.py
SearchFactory.sortBy
def sortBy(self, *args): '''sort results by given criteria''' criteria = ['seeds', 'size'] for k in args: if k in criteria: self.results = sorted( self.results, key=attrgetter(k), reverse=True) return self
python
def sortBy(self, *args): '''sort results by given criteria''' criteria = ['seeds', 'size'] for k in args: if k in criteria: self.results = sorted( self.results, key=attrgetter(k), reverse=True) return self
[ "def", "sortBy", "(", "self", ",", "*", "args", ")", ":", "criteria", "=", "[", "'seeds'", ",", "'size'", "]", "for", "k", "in", "args", ":", "if", "k", "in", "criteria", ":", "self", ".", "results", "=", "sorted", "(", "self", ".", "results", ",", "key", "=", "attrgetter", "(", "k", ")", ",", "reverse", "=", "True", ")", "return", "self" ]
sort results by given criteria
[ "sort", "results", "by", "given", "criteria" ]
98de03bb28f846153b41d4abf801014c5791ff54
https://github.com/walidsa3d/baklava/blob/98de03bb28f846153b41d4abf801014c5791ff54/baklava/providers/searchfactory.py#L37-L44
245,881
walidsa3d/baklava
baklava/providers/searchfactory.py
SearchFactory.filterBy
def filterBy(self, **kwargs): ''' filter results by given criteria''' criteria = ['seeds', 'size'] for k, v in kwargs.iteritems(): if k in criteria: self.results = filter( lambda x: getattr(x, k) >= v, self.results) return self
python
def filterBy(self, **kwargs): ''' filter results by given criteria''' criteria = ['seeds', 'size'] for k, v in kwargs.iteritems(): if k in criteria: self.results = filter( lambda x: getattr(x, k) >= v, self.results) return self
[ "def", "filterBy", "(", "self", ",", "*", "*", "kwargs", ")", ":", "criteria", "=", "[", "'seeds'", ",", "'size'", "]", "for", "k", ",", "v", "in", "kwargs", ".", "iteritems", "(", ")", ":", "if", "k", "in", "criteria", ":", "self", ".", "results", "=", "filter", "(", "lambda", "x", ":", "getattr", "(", "x", ",", "k", ")", ">=", "v", ",", "self", ".", "results", ")", "return", "self" ]
filter results by given criteria
[ "filter", "results", "by", "given", "criteria" ]
98de03bb28f846153b41d4abf801014c5791ff54
https://github.com/walidsa3d/baklava/blob/98de03bb28f846153b41d4abf801014c5791ff54/baklava/providers/searchfactory.py#L46-L53
245,882
20c/twentyc.tools
twentyc/tools/session.py
perms_check
def perms_check(perms, prefix, ambiguous=False): """ Return the user's perms for the specified prefix perms <dict> permissions dict prefix <string> namespace to check for perms ambiguous <bool=False> if True reverse wildcard matching is active and a perm check for a.b.* will be matched by the user having perms to a.b.c or a.b.d - only use this if you know what you are doing. """ try: token = prefix.split(".") i = 1 l = len(token) r = 0 # collect permission rules with a wildcard in them, so we dont do unecessary # regex searches later on perms_wc = {} for ns, p in perms.items(): if ns.find("*") > -1: perms_wc[re.escape(ns).replace("\*", "[^\.]+")] = p while i <= l: k = ".".join(token[:i]) matched = False # check for exact match if perms.has_key(k): r = perms.get(k) # check for wildcard matches (if any wildcard rules exist) elif perms_wc: for ns, p in perms_wc.items(): a = "^%s$" % ns b = "^%s\." % ns j = len(a) u = len(b) if j > matched and re.match(a, k): r = p matched = j elif u > matched and re.match(b, k): r = p matched = u # if not matched at all and ambiguous flag is true, do ambiguous matching if not matched and ambiguous: m = "^%s" % re.escape(k).replace("\*", "[^\.]+") for ns, p in perms.items(): if re.match(m, ns) and p > r: r = p break i += 1 return r except: raise
python
def perms_check(perms, prefix, ambiguous=False): """ Return the user's perms for the specified prefix perms <dict> permissions dict prefix <string> namespace to check for perms ambiguous <bool=False> if True reverse wildcard matching is active and a perm check for a.b.* will be matched by the user having perms to a.b.c or a.b.d - only use this if you know what you are doing. """ try: token = prefix.split(".") i = 1 l = len(token) r = 0 # collect permission rules with a wildcard in them, so we dont do unecessary # regex searches later on perms_wc = {} for ns, p in perms.items(): if ns.find("*") > -1: perms_wc[re.escape(ns).replace("\*", "[^\.]+")] = p while i <= l: k = ".".join(token[:i]) matched = False # check for exact match if perms.has_key(k): r = perms.get(k) # check for wildcard matches (if any wildcard rules exist) elif perms_wc: for ns, p in perms_wc.items(): a = "^%s$" % ns b = "^%s\." % ns j = len(a) u = len(b) if j > matched and re.match(a, k): r = p matched = j elif u > matched and re.match(b, k): r = p matched = u # if not matched at all and ambiguous flag is true, do ambiguous matching if not matched and ambiguous: m = "^%s" % re.escape(k).replace("\*", "[^\.]+") for ns, p in perms.items(): if re.match(m, ns) and p > r: r = p break i += 1 return r except: raise
[ "def", "perms_check", "(", "perms", ",", "prefix", ",", "ambiguous", "=", "False", ")", ":", "try", ":", "token", "=", "prefix", ".", "split", "(", "\".\"", ")", "i", "=", "1", "l", "=", "len", "(", "token", ")", "r", "=", "0", "# collect permission rules with a wildcard in them, so we dont do unecessary", "# regex searches later on", "perms_wc", "=", "{", "}", "for", "ns", ",", "p", "in", "perms", ".", "items", "(", ")", ":", "if", "ns", ".", "find", "(", "\"*\"", ")", ">", "-", "1", ":", "perms_wc", "[", "re", ".", "escape", "(", "ns", ")", ".", "replace", "(", "\"\\*\"", ",", "\"[^\\.]+\"", ")", "]", "=", "p", "while", "i", "<=", "l", ":", "k", "=", "\".\"", ".", "join", "(", "token", "[", ":", "i", "]", ")", "matched", "=", "False", "# check for exact match", "if", "perms", ".", "has_key", "(", "k", ")", ":", "r", "=", "perms", ".", "get", "(", "k", ")", "# check for wildcard matches (if any wildcard rules exist)", "elif", "perms_wc", ":", "for", "ns", ",", "p", "in", "perms_wc", ".", "items", "(", ")", ":", "a", "=", "\"^%s$\"", "%", "ns", "b", "=", "\"^%s\\.\"", "%", "ns", "j", "=", "len", "(", "a", ")", "u", "=", "len", "(", "b", ")", "if", "j", ">", "matched", "and", "re", ".", "match", "(", "a", ",", "k", ")", ":", "r", "=", "p", "matched", "=", "j", "elif", "u", ">", "matched", "and", "re", ".", "match", "(", "b", ",", "k", ")", ":", "r", "=", "p", "matched", "=", "u", "# if not matched at all and ambiguous flag is true, do ambiguous matching", "if", "not", "matched", "and", "ambiguous", ":", "m", "=", "\"^%s\"", "%", "re", ".", "escape", "(", "k", ")", ".", "replace", "(", "\"\\*\"", ",", "\"[^\\.]+\"", ")", "for", "ns", ",", "p", "in", "perms", ".", "items", "(", ")", ":", "if", "re", ".", "match", "(", "m", ",", "ns", ")", "and", "p", ">", "r", ":", "r", "=", "p", "break", "i", "+=", "1", "return", "r", "except", ":", "raise" ]
Return the user's perms for the specified prefix perms <dict> permissions dict prefix <string> namespace to check for perms ambiguous <bool=False> if True reverse wildcard matching is active and a perm check for a.b.* will be matched by the user having perms to a.b.c or a.b.d - only use this if you know what you are doing.
[ "Return", "the", "user", "s", "perms", "for", "the", "specified", "prefix" ]
f8f681e64f58d449bfc32646ba8bcc57db90a233
https://github.com/20c/twentyc.tools/blob/f8f681e64f58d449bfc32646ba8bcc57db90a233/twentyc/tools/session.py#L43-L105
245,883
anti1869/sunhead
src/sunhead/conf.py
Settings.discover_config_path
def discover_config_path(self, config_filename: str) -> str: """ Search for config file in a number of places. If there is no config file found, will return None. :param config_filename: Config file name or custom path to filename with config. :return: Path to the discovered config file or None. """ if config_filename and os.path.isfile(config_filename): return config_filename for place in _common_places: config_path = os.path.join(place, config_filename) if os.path.isfile(config_path): return config_path return
python
def discover_config_path(self, config_filename: str) -> str: """ Search for config file in a number of places. If there is no config file found, will return None. :param config_filename: Config file name or custom path to filename with config. :return: Path to the discovered config file or None. """ if config_filename and os.path.isfile(config_filename): return config_filename for place in _common_places: config_path = os.path.join(place, config_filename) if os.path.isfile(config_path): return config_path return
[ "def", "discover_config_path", "(", "self", ",", "config_filename", ":", "str", ")", "->", "str", ":", "if", "config_filename", "and", "os", ".", "path", ".", "isfile", "(", "config_filename", ")", ":", "return", "config_filename", "for", "place", "in", "_common_places", ":", "config_path", "=", "os", ".", "path", ".", "join", "(", "place", ",", "config_filename", ")", "if", "os", ".", "path", ".", "isfile", "(", "config_path", ")", ":", "return", "config_path", "return" ]
Search for config file in a number of places. If there is no config file found, will return None. :param config_filename: Config file name or custom path to filename with config. :return: Path to the discovered config file or None.
[ "Search", "for", "config", "file", "in", "a", "number", "of", "places", ".", "If", "there", "is", "no", "config", "file", "found", "will", "return", "None", "." ]
5117ec797a38eb82d955241d20547d125efe80f3
https://github.com/anti1869/sunhead/blob/5117ec797a38eb82d955241d20547d125efe80f3/src/sunhead/conf.py#L120-L137
245,884
anti1869/sunhead
src/sunhead/conf.py
Settings.gen_from_yaml_config
def gen_from_yaml_config(self, config_path: str) -> Iterator: """ Convention is to uppercase first level keys. :param config_path: Valid path to the yml config file. :return: Config loaded from yml file """ if not config_path: return {} with open(config_path, 'r') as f: yaml_config = yaml.load(f) gen = map(lambda x: (x[0].upper(), x[1]), yaml_config.items()) return gen
python
def gen_from_yaml_config(self, config_path: str) -> Iterator: """ Convention is to uppercase first level keys. :param config_path: Valid path to the yml config file. :return: Config loaded from yml file """ if not config_path: return {} with open(config_path, 'r') as f: yaml_config = yaml.load(f) gen = map(lambda x: (x[0].upper(), x[1]), yaml_config.items()) return gen
[ "def", "gen_from_yaml_config", "(", "self", ",", "config_path", ":", "str", ")", "->", "Iterator", ":", "if", "not", "config_path", ":", "return", "{", "}", "with", "open", "(", "config_path", ",", "'r'", ")", "as", "f", ":", "yaml_config", "=", "yaml", ".", "load", "(", "f", ")", "gen", "=", "map", "(", "lambda", "x", ":", "(", "x", "[", "0", "]", ".", "upper", "(", ")", ",", "x", "[", "1", "]", ")", ",", "yaml_config", ".", "items", "(", ")", ")", "return", "gen" ]
Convention is to uppercase first level keys. :param config_path: Valid path to the yml config file. :return: Config loaded from yml file
[ "Convention", "is", "to", "uppercase", "first", "level", "keys", "." ]
5117ec797a38eb82d955241d20547d125efe80f3
https://github.com/anti1869/sunhead/blob/5117ec797a38eb82d955241d20547d125efe80f3/src/sunhead/conf.py#L139-L154
245,885
anti1869/sunhead
src/sunhead/conf.py
Settings.remove_handler_if_not_configured
def remove_handler_if_not_configured(self, dict_config, requested_handlers, handler_name, check_key) -> None: """ Remove ``handler_name`` from ``dict_config`` and ``requested_handlers`` if ``check_key`` is empty. """ try: if not dict_config["handlers"][handler_name][check_key]: dict_config["handlers"].pop(handler_name) if handler_name in requested_handlers: requested_handlers.remove(handler_name) except KeyError: # Ignore key errors pass
python
def remove_handler_if_not_configured(self, dict_config, requested_handlers, handler_name, check_key) -> None: """ Remove ``handler_name`` from ``dict_config`` and ``requested_handlers`` if ``check_key`` is empty. """ try: if not dict_config["handlers"][handler_name][check_key]: dict_config["handlers"].pop(handler_name) if handler_name in requested_handlers: requested_handlers.remove(handler_name) except KeyError: # Ignore key errors pass
[ "def", "remove_handler_if_not_configured", "(", "self", ",", "dict_config", ",", "requested_handlers", ",", "handler_name", ",", "check_key", ")", "->", "None", ":", "try", ":", "if", "not", "dict_config", "[", "\"handlers\"", "]", "[", "handler_name", "]", "[", "check_key", "]", ":", "dict_config", "[", "\"handlers\"", "]", ".", "pop", "(", "handler_name", ")", "if", "handler_name", "in", "requested_handlers", ":", "requested_handlers", ".", "remove", "(", "handler_name", ")", "except", "KeyError", ":", "# Ignore key errors", "pass" ]
Remove ``handler_name`` from ``dict_config`` and ``requested_handlers`` if ``check_key`` is empty.
[ "Remove", "handler_name", "from", "dict_config", "and", "requested_handlers", "if", "check_key", "is", "empty", "." ]
5117ec797a38eb82d955241d20547d125efe80f3
https://github.com/anti1869/sunhead/blob/5117ec797a38eb82d955241d20547d125efe80f3/src/sunhead/conf.py#L173-L184
245,886
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.open
def open(self, name, attrs=None, *, close=False): """Writes an opening element. :name: the name of the element :attrs: a dict of attributes :close: if True, close will be called immediately after writing the element """ self._pad() self._writer.startElement(_normalize_name(name), _normalize_attrs(attrs)) self._newline() self._open_elements.append(name) if close: self.close()
python
def open(self, name, attrs=None, *, close=False): """Writes an opening element. :name: the name of the element :attrs: a dict of attributes :close: if True, close will be called immediately after writing the element """ self._pad() self._writer.startElement(_normalize_name(name), _normalize_attrs(attrs)) self._newline() self._open_elements.append(name) if close: self.close()
[ "def", "open", "(", "self", ",", "name", ",", "attrs", "=", "None", ",", "*", ",", "close", "=", "False", ")", ":", "self", ".", "_pad", "(", ")", "self", ".", "_writer", ".", "startElement", "(", "_normalize_name", "(", "name", ")", ",", "_normalize_attrs", "(", "attrs", ")", ")", "self", ".", "_newline", "(", ")", "self", ".", "_open_elements", ".", "append", "(", "name", ")", "if", "close", ":", "self", ".", "close", "(", ")" ]
Writes an opening element. :name: the name of the element :attrs: a dict of attributes :close: if True, close will be called immediately after writing the element
[ "Writes", "an", "opening", "element", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L98-L111
245,887
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.close
def close(self, name=None): """Closes the most recently opened element. :name: if given, this value must match the name given for the most recently opened element. This is primarily here for providing quick error checking for applications """ tag = self._open_elements.pop() if name is not None and name != tag: raise Exception("Tag closing mismatch") self._pad() self._writer.endElement(_normalize_name(tag)) self._newline()
python
def close(self, name=None): """Closes the most recently opened element. :name: if given, this value must match the name given for the most recently opened element. This is primarily here for providing quick error checking for applications """ tag = self._open_elements.pop() if name is not None and name != tag: raise Exception("Tag closing mismatch") self._pad() self._writer.endElement(_normalize_name(tag)) self._newline()
[ "def", "close", "(", "self", ",", "name", "=", "None", ")", ":", "tag", "=", "self", ".", "_open_elements", ".", "pop", "(", ")", "if", "name", "is", "not", "None", "and", "name", "!=", "tag", ":", "raise", "Exception", "(", "\"Tag closing mismatch\"", ")", "self", ".", "_pad", "(", ")", "self", ".", "_writer", ".", "endElement", "(", "_normalize_name", "(", "tag", ")", ")", "self", ".", "_newline", "(", ")" ]
Closes the most recently opened element. :name: if given, this value must match the name given for the most recently opened element. This is primarily here for providing quick error checking for applications
[ "Closes", "the", "most", "recently", "opened", "element", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L113-L124
245,888
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.characters
def characters(self, characters): """Writes content for a tag. :characters: the characters to write """ self._pad() self._writer.characters(str(characters)) self._newline()
python
def characters(self, characters): """Writes content for a tag. :characters: the characters to write """ self._pad() self._writer.characters(str(characters)) self._newline()
[ "def", "characters", "(", "self", ",", "characters", ")", ":", "self", ".", "_pad", "(", ")", "self", ".", "_writer", ".", "characters", "(", "str", "(", "characters", ")", ")", "self", ".", "_newline", "(", ")" ]
Writes content for a tag. :characters: the characters to write
[ "Writes", "content", "for", "a", "tag", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L126-L133
245,889
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.element
def element(self, name, attrs=None): """This method is a context manager for writing and closing an element.""" self.open(name, attrs) yield self.close()
python
def element(self, name, attrs=None): """This method is a context manager for writing and closing an element.""" self.open(name, attrs) yield self.close()
[ "def", "element", "(", "self", ",", "name", ",", "attrs", "=", "None", ")", ":", "self", ".", "open", "(", "name", ",", "attrs", ")", "yield", "self", ".", "close", "(", ")" ]
This method is a context manager for writing and closing an element.
[ "This", "method", "is", "a", "context", "manager", "for", "writing", "and", "closing", "an", "element", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L143-L147
245,890
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.no_inner_space
def no_inner_space(self, *, outer=True): """Default spacing for all things written is ignored in this context. :outer: boolean, if True the typical padding and newline are added before the first and after the last things written """ if outer: self._pad() indent_was = self._indent self._indent = None try: yield finally: self._indent = indent_was if outer: self._newline()
python
def no_inner_space(self, *, outer=True): """Default spacing for all things written is ignored in this context. :outer: boolean, if True the typical padding and newline are added before the first and after the last things written """ if outer: self._pad() indent_was = self._indent self._indent = None try: yield finally: self._indent = indent_was if outer: self._newline()
[ "def", "no_inner_space", "(", "self", ",", "*", ",", "outer", "=", "True", ")", ":", "if", "outer", ":", "self", ".", "_pad", "(", ")", "indent_was", "=", "self", ".", "_indent", "self", ".", "_indent", "=", "None", "try", ":", "yield", "finally", ":", "self", ".", "_indent", "=", "indent_was", "if", "outer", ":", "self", ".", "_newline", "(", ")" ]
Default spacing for all things written is ignored in this context. :outer: boolean, if True the typical padding and newline are added before the first and after the last things written
[ "Default", "spacing", "for", "all", "things", "written", "is", "ignored", "in", "this", "context", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L150-L168
245,891
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter.content
def content(self, name, attrs=None, characters=None): """Writes an element, some content for the element, and then closes the element, all without indentation. :name: the name of the element :attrs: a dict of attributes :characters: the characters to write """ with self.no_inner_space(outer=True): with self.element(name, attrs): if characters: self.characters(characters)
python
def content(self, name, attrs=None, characters=None): """Writes an element, some content for the element, and then closes the element, all without indentation. :name: the name of the element :attrs: a dict of attributes :characters: the characters to write """ with self.no_inner_space(outer=True): with self.element(name, attrs): if characters: self.characters(characters)
[ "def", "content", "(", "self", ",", "name", ",", "attrs", "=", "None", ",", "characters", "=", "None", ")", ":", "with", "self", ".", "no_inner_space", "(", "outer", "=", "True", ")", ":", "with", "self", ".", "element", "(", "name", ",", "attrs", ")", ":", "if", "characters", ":", "self", ".", "characters", "(", "characters", ")" ]
Writes an element, some content for the element, and then closes the element, all without indentation. :name: the name of the element :attrs: a dict of attributes :characters: the characters to write
[ "Writes", "an", "element", "some", "content", "for", "the", "element", "and", "then", "closes", "the", "element", "all", "without", "indentation", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L170-L181
245,892
treycucco/bidon
bidon/xml/stream_writer.py
StreamWriter._pad
def _pad(self): """Pads the output with an amount of indentation appropriate for the number of open element. This method does nothing if the indent value passed to the constructor is falsy. """ if self._indent: self.whitespace(self._indent * len(self._open_elements))
python
def _pad(self): """Pads the output with an amount of indentation appropriate for the number of open element. This method does nothing if the indent value passed to the constructor is falsy. """ if self._indent: self.whitespace(self._indent * len(self._open_elements))
[ "def", "_pad", "(", "self", ")", ":", "if", "self", ".", "_indent", ":", "self", ".", "whitespace", "(", "self", ".", "_indent", "*", "len", "(", "self", ".", "_open_elements", ")", ")" ]
Pads the output with an amount of indentation appropriate for the number of open element. This method does nothing if the indent value passed to the constructor is falsy.
[ "Pads", "the", "output", "with", "an", "amount", "of", "indentation", "appropriate", "for", "the", "number", "of", "open", "element", "." ]
d9f24596841d0e69e8ac70a1d1a1deecea95e340
https://github.com/treycucco/bidon/blob/d9f24596841d0e69e8ac70a1d1a1deecea95e340/bidon/xml/stream_writer.py#L183-L189
245,893
emencia/dr-dump
drdump/dependancies.py
DependanciesManager.get_dump_order
def get_dump_order(self, names): """ Return ordered dump names required for a given dump names list """ finded_names = self.get_dump_names(names) return [item for item in self if item in finded_names]
python
def get_dump_order(self, names): """ Return ordered dump names required for a given dump names list """ finded_names = self.get_dump_names(names) return [item for item in self if item in finded_names]
[ "def", "get_dump_order", "(", "self", ",", "names", ")", ":", "finded_names", "=", "self", ".", "get_dump_names", "(", "names", ")", "return", "[", "item", "for", "item", "in", "self", "if", "item", "in", "finded_names", "]" ]
Return ordered dump names required for a given dump names list
[ "Return", "ordered", "dump", "names", "required", "for", "a", "given", "dump", "names", "list" ]
f03a2ed01fb82e6fe1df66f7fa82e646a3822d4b
https://github.com/emencia/dr-dump/blob/f03a2ed01fb82e6fe1df66f7fa82e646a3822d4b/drdump/dependancies.py#L71-L76
245,894
mk-fg/txu1
txu1/api_v1.py
log_web_failure
def log_web_failure(err, err_lid=''): 'Try to print meaningful info about wrapped twisted.web exceptions.' if err_lid and not err_lid.endswith(' '): err_lid += ' ' try: err.value.reasons # multiple levels of fail except AttributeError: pass else: err = err.value if hasattr(err, 'reasons'): for err in err.reasons: lid = ' ' if isinstance(err, Failure): log.error('{}{} {}: {}'.format(err_lid, lid, err.type, err.getErrorMessage())) for line in err.getTraceback().splitlines(): log.error('{}{} {}'.format(err_lid, lid, line)) else: log.error('{}{} {}: {}'.format(err_lid, lid, type(err), err))
python
def log_web_failure(err, err_lid=''): 'Try to print meaningful info about wrapped twisted.web exceptions.' if err_lid and not err_lid.endswith(' '): err_lid += ' ' try: err.value.reasons # multiple levels of fail except AttributeError: pass else: err = err.value if hasattr(err, 'reasons'): for err in err.reasons: lid = ' ' if isinstance(err, Failure): log.error('{}{} {}: {}'.format(err_lid, lid, err.type, err.getErrorMessage())) for line in err.getTraceback().splitlines(): log.error('{}{} {}'.format(err_lid, lid, line)) else: log.error('{}{} {}: {}'.format(err_lid, lid, type(err), err))
[ "def", "log_web_failure", "(", "err", ",", "err_lid", "=", "''", ")", ":", "if", "err_lid", "and", "not", "err_lid", ".", "endswith", "(", "' '", ")", ":", "err_lid", "+=", "' '", "try", ":", "err", ".", "value", ".", "reasons", "# multiple levels of fail", "except", "AttributeError", ":", "pass", "else", ":", "err", "=", "err", ".", "value", "if", "hasattr", "(", "err", ",", "'reasons'", ")", ":", "for", "err", "in", "err", ".", "reasons", ":", "lid", "=", "' '", "if", "isinstance", "(", "err", ",", "Failure", ")", ":", "log", ".", "error", "(", "'{}{} {}: {}'", ".", "format", "(", "err_lid", ",", "lid", ",", "err", ".", "type", ",", "err", ".", "getErrorMessage", "(", ")", ")", ")", "for", "line", "in", "err", ".", "getTraceback", "(", ")", ".", "splitlines", "(", ")", ":", "log", ".", "error", "(", "'{}{} {}'", ".", "format", "(", "err_lid", ",", "lid", ",", "line", ")", ")", "else", ":", "log", ".", "error", "(", "'{}{} {}: {}'", ".", "format", "(", "err_lid", ",", "lid", ",", "type", "(", "err", ")", ",", "err", ")", ")" ]
Try to print meaningful info about wrapped twisted.web exceptions.
[ "Try", "to", "print", "meaningful", "info", "about", "wrapped", "twisted", ".", "web", "exceptions", "." ]
0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6
https://github.com/mk-fg/txu1/blob/0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6/txu1/api_v1.py#L46-L59
245,895
mk-fg/txu1
txu1/api_v1.py
force_bytes
def force_bytes(bytes_or_unicode, encoding='utf-8', errors='backslashreplace'): 'Convert passed string type to bytes, if necessary.' if isinstance(bytes_or_unicode, bytes): return bytes_or_unicode return bytes_or_unicode.encode(encoding, errors)
python
def force_bytes(bytes_or_unicode, encoding='utf-8', errors='backslashreplace'): 'Convert passed string type to bytes, if necessary.' if isinstance(bytes_or_unicode, bytes): return bytes_or_unicode return bytes_or_unicode.encode(encoding, errors)
[ "def", "force_bytes", "(", "bytes_or_unicode", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'backslashreplace'", ")", ":", "if", "isinstance", "(", "bytes_or_unicode", ",", "bytes", ")", ":", "return", "bytes_or_unicode", "return", "bytes_or_unicode", ".", "encode", "(", "encoding", ",", "errors", ")" ]
Convert passed string type to bytes, if necessary.
[ "Convert", "passed", "string", "type", "to", "bytes", "if", "necessary", "." ]
0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6
https://github.com/mk-fg/txu1/blob/0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6/txu1/api_v1.py#L61-L64
245,896
mk-fg/txu1
txu1/api_v1.py
txU1.volume_info
def volume_info(self, vol=None, type_filter=None): 'Get list of all volumes or info for the one specified.' vols = yield self(join('volumes', vol)) if not isinstance(vols, list): vols = [vols] if type_filter is not None: vols = list(vol for vol in vols if vol['type'] == type_filter) if vol is not None: defer.returnValue(vols[0] if vols else None) defer.returnValue(vols)
python
def volume_info(self, vol=None, type_filter=None): 'Get list of all volumes or info for the one specified.' vols = yield self(join('volumes', vol)) if not isinstance(vols, list): vols = [vols] if type_filter is not None: vols = list(vol for vol in vols if vol['type'] == type_filter) if vol is not None: defer.returnValue(vols[0] if vols else None) defer.returnValue(vols)
[ "def", "volume_info", "(", "self", ",", "vol", "=", "None", ",", "type_filter", "=", "None", ")", ":", "vols", "=", "yield", "self", "(", "join", "(", "'volumes'", ",", "vol", ")", ")", "if", "not", "isinstance", "(", "vols", ",", "list", ")", ":", "vols", "=", "[", "vols", "]", "if", "type_filter", "is", "not", "None", ":", "vols", "=", "list", "(", "vol", "for", "vol", "in", "vols", "if", "vol", "[", "'type'", "]", "==", "type_filter", ")", "if", "vol", "is", "not", "None", ":", "defer", ".", "returnValue", "(", "vols", "[", "0", "]", "if", "vols", "else", "None", ")", "defer", ".", "returnValue", "(", "vols", ")" ]
Get list of all volumes or info for the one specified.
[ "Get", "list", "of", "all", "volumes", "or", "info", "for", "the", "one", "specified", "." ]
0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6
https://github.com/mk-fg/txu1/blob/0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6/txu1/api_v1.py#L579-L586
245,897
mk-fg/txu1
txu1/api_v1.py
txU1.node_mkdir
def node_mkdir(self, path=''): 'Does not raise any errors if dir already exists.' return self(path, data=dict(kind='directory'), encode='json', method='put')
python
def node_mkdir(self, path=''): 'Does not raise any errors if dir already exists.' return self(path, data=dict(kind='directory'), encode='json', method='put')
[ "def", "node_mkdir", "(", "self", ",", "path", "=", "''", ")", ":", "return", "self", "(", "path", ",", "data", "=", "dict", "(", "kind", "=", "'directory'", ")", ",", "encode", "=", "'json'", ",", "method", "=", "'put'", ")" ]
Does not raise any errors if dir already exists.
[ "Does", "not", "raise", "any", "errors", "if", "dir", "already", "exists", "." ]
0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6
https://github.com/mk-fg/txu1/blob/0326e9105f3cf9efa17a3d2ed1dd5606e0ad57d6/txu1/api_v1.py#L641-L643
245,898
pythonkc/pythonkc-meetups
pythonkc_meetups/parsers.py
parse_event
def parse_event(data, attendees=None, photos=None): """ Parse a ``MeetupEvent`` from the given response data. Returns ------- A ``pythonkc_meetups.types.MeetupEvent``. """ return MeetupEvent( id=data.get('id', None), name=data.get('name', None), description=data.get('description', None), time=parse_datetime(data.get('time', None), data.get('utc_offset', None)), status=data.get('status', None), yes_rsvp_count=data.get('yes_rsvp_count', None), maybe_rsvp_count=data.get('maybe_rsvp_count', None), event_url=data.get('event_url', None), photo_url=data.get('photo_url', None), venue=parse_venue(data['venue']) if 'venue' in data else None, attendees=attendees, photos=photos )
python
def parse_event(data, attendees=None, photos=None): """ Parse a ``MeetupEvent`` from the given response data. Returns ------- A ``pythonkc_meetups.types.MeetupEvent``. """ return MeetupEvent( id=data.get('id', None), name=data.get('name', None), description=data.get('description', None), time=parse_datetime(data.get('time', None), data.get('utc_offset', None)), status=data.get('status', None), yes_rsvp_count=data.get('yes_rsvp_count', None), maybe_rsvp_count=data.get('maybe_rsvp_count', None), event_url=data.get('event_url', None), photo_url=data.get('photo_url', None), venue=parse_venue(data['venue']) if 'venue' in data else None, attendees=attendees, photos=photos )
[ "def", "parse_event", "(", "data", ",", "attendees", "=", "None", ",", "photos", "=", "None", ")", ":", "return", "MeetupEvent", "(", "id", "=", "data", ".", "get", "(", "'id'", ",", "None", ")", ",", "name", "=", "data", ".", "get", "(", "'name'", ",", "None", ")", ",", "description", "=", "data", ".", "get", "(", "'description'", ",", "None", ")", ",", "time", "=", "parse_datetime", "(", "data", ".", "get", "(", "'time'", ",", "None", ")", ",", "data", ".", "get", "(", "'utc_offset'", ",", "None", ")", ")", ",", "status", "=", "data", ".", "get", "(", "'status'", ",", "None", ")", ",", "yes_rsvp_count", "=", "data", ".", "get", "(", "'yes_rsvp_count'", ",", "None", ")", ",", "maybe_rsvp_count", "=", "data", ".", "get", "(", "'maybe_rsvp_count'", ",", "None", ")", ",", "event_url", "=", "data", ".", "get", "(", "'event_url'", ",", "None", ")", ",", "photo_url", "=", "data", ".", "get", "(", "'photo_url'", ",", "None", ")", ",", "venue", "=", "parse_venue", "(", "data", "[", "'venue'", "]", ")", "if", "'venue'", "in", "data", "else", "None", ",", "attendees", "=", "attendees", ",", "photos", "=", "photos", ")" ]
Parse a ``MeetupEvent`` from the given response data. Returns ------- A ``pythonkc_meetups.types.MeetupEvent``.
[ "Parse", "a", "MeetupEvent", "from", "the", "given", "response", "data", "." ]
54b5062b2825011c87c303256f59c6c13d395ee7
https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/parsers.py#L18-L41
245,899
pythonkc/pythonkc-meetups
pythonkc_meetups/parsers.py
parse_venue
def parse_venue(data): """ Parse a ``MeetupVenue`` from the given response data. Returns ------- A `pythonkc_meetups.types.`MeetupVenue``. """ return MeetupVenue( id=data.get('id', None), name=data.get('name', None), address_1=data.get('address_1', None), address_2=data.get('address_2', None), address_3=data.get('address_3', None), city=data.get('city', None), state=data.get('state', None), zip=data.get('zip', None), country=data.get('country', None), lat=data.get('lat', None), lon=data.get('lon', None) )
python
def parse_venue(data): """ Parse a ``MeetupVenue`` from the given response data. Returns ------- A `pythonkc_meetups.types.`MeetupVenue``. """ return MeetupVenue( id=data.get('id', None), name=data.get('name', None), address_1=data.get('address_1', None), address_2=data.get('address_2', None), address_3=data.get('address_3', None), city=data.get('city', None), state=data.get('state', None), zip=data.get('zip', None), country=data.get('country', None), lat=data.get('lat', None), lon=data.get('lon', None) )
[ "def", "parse_venue", "(", "data", ")", ":", "return", "MeetupVenue", "(", "id", "=", "data", ".", "get", "(", "'id'", ",", "None", ")", ",", "name", "=", "data", ".", "get", "(", "'name'", ",", "None", ")", ",", "address_1", "=", "data", ".", "get", "(", "'address_1'", ",", "None", ")", ",", "address_2", "=", "data", ".", "get", "(", "'address_2'", ",", "None", ")", ",", "address_3", "=", "data", ".", "get", "(", "'address_3'", ",", "None", ")", ",", "city", "=", "data", ".", "get", "(", "'city'", ",", "None", ")", ",", "state", "=", "data", ".", "get", "(", "'state'", ",", "None", ")", ",", "zip", "=", "data", ".", "get", "(", "'zip'", ",", "None", ")", ",", "country", "=", "data", ".", "get", "(", "'country'", ",", "None", ")", ",", "lat", "=", "data", ".", "get", "(", "'lat'", ",", "None", ")", ",", "lon", "=", "data", ".", "get", "(", "'lon'", ",", "None", ")", ")" ]
Parse a ``MeetupVenue`` from the given response data. Returns ------- A `pythonkc_meetups.types.`MeetupVenue``.
[ "Parse", "a", "MeetupVenue", "from", "the", "given", "response", "data", "." ]
54b5062b2825011c87c303256f59c6c13d395ee7
https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/parsers.py#L44-L65