Dataset schema (column: type, observed value lengths):
- repository_name: string, 7 to 55 characters
- func_path_in_repository: string, 4 to 223 characters
- func_name: string, 1 to 134 characters
- whole_func_string: string, 75 to 104k characters
- language: string, 1 distinct value
- func_code_string: string, 75 to 104k characters
- func_code_tokens: list, 19 to 28.4k items
- func_documentation_string: string, 1 to 46.9k characters
- func_documentation_tokens: list, 1 to 1.97k items
- split_name: string, 1 distinct value
- func_code_url: string, 87 to 315 characters
KnorrFG/pyparadigm
pyparadigm/extras.py
_normalize
def _normalize(mat: np.ndarray): """rescales a numpy array, so that min is 0 and max is 255""" return ((mat - mat.min()) * (255 / mat.max())).astype(np.uint8)
python
def _normalize(mat: np.ndarray): """rescales a numpy array, so that min is 0 and max is 255""" return ((mat - mat.min()) * (255 / mat.max())).astype(np.uint8)
[ "def", "_normalize", "(", "mat", ":", "np", ".", "ndarray", ")", ":", "return", "(", "(", "mat", "-", "mat", ".", "min", "(", ")", ")", "*", "(", "255", "/", "mat", ".", "max", "(", ")", ")", ")", ".", "astype", "(", "np", ".", "uint8", ")" ]
rescales a numpy array, so that min is 0 and max is 255
[ "rescales", "a", "numpy", "array", "so", "that", "min", "is", "0", "and", "max", "is", "255" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/extras.py#L15-L17
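A minimal usage sketch for the _normalize helper above (illustrative only; the function is copied from the row and numpy is assumed installed):

import numpy as np

def _normalize(mat: np.ndarray):
    """rescales a numpy array, so that min is 0 and max is 255"""
    return ((mat - mat.min()) * (255 / mat.max())).astype(np.uint8)

mat = np.array([[0, 1, 2], [3, 4, 5]])
print(_normalize(mat))
# [[  0  51 102]
#  [153 204 255]] -- uint8; the maximum only reaches 255 when mat.min() is 0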
KnorrFG/pyparadigm
pyparadigm/extras.py
to_24bit_gray
def to_24bit_gray(mat: np.ndarray): """returns a matrix that contains RGB channels, and colors scaled from 0 to 255""" return np.repeat(np.expand_dims(_normalize(mat), axis=2), 3, axis=2)
python
def to_24bit_gray(mat: np.ndarray): """returns a matrix that contains RGB channels, and colors scaled from 0 to 255""" return np.repeat(np.expand_dims(_normalize(mat), axis=2), 3, axis=2)
[ "def", "to_24bit_gray", "(", "mat", ":", "np", ".", "ndarray", ")", ":", "return", "np", ".", "repeat", "(", "np", ".", "expand_dims", "(", "_normalize", "(", "mat", ")", ",", "axis", "=", "2", ")", ",", "3", ",", "axis", "=", "2", ")" ]
returns a matrix that contains RGB channels, and colors scaled from 0 to 255
[ "returns", "a", "matrix", "that", "contains", "RGB", "channels", "and", "colors", "scaled", "from", "0", "to", "255" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/extras.py#L20-L23
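A shape-level sketch of to_24bit_gray above (illustrative; reuses _normalize from the previous row):

import numpy as np

def _normalize(mat: np.ndarray):
    return ((mat - mat.min()) * (255 / mat.max())).astype(np.uint8)

def to_24bit_gray(mat: np.ndarray):
    """returns a matrix that contains RGB channels, and colors scaled from 0 to 255"""
    return np.repeat(np.expand_dims(_normalize(mat), axis=2), 3, axis=2)

gray = to_24bit_gray(np.random.rand(4, 5))
print(gray.shape, gray.dtype)   # (4, 5, 3) uint8 -- the single gray channel is repeated into R, G and B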
KnorrFG/pyparadigm
pyparadigm/extras.py
apply_color_map
def apply_color_map(name: str, mat: np.ndarray = None): """returns an RGB matrix scaled by a matplotlib color map""" def apply_map(mat): return (cm.get_cmap(name)(_normalize(mat))[:, :, :3] * 255).astype(np.uint8) return apply_map if mat is None else apply_map(mat)
python
def apply_color_map(name: str, mat: np.ndarray = None): """returns an RGB matrix scaled by a matplotlib color map""" def apply_map(mat): return (cm.get_cmap(name)(_normalize(mat))[:, :, :3] * 255).astype(np.uint8) return apply_map if mat is None else apply_map(mat)
[ "def", "apply_color_map", "(", "name", ":", "str", ",", "mat", ":", "np", ".", "ndarray", "=", "None", ")", ":", "def", "apply_map", "(", "mat", ")", ":", "return", "(", "cm", ".", "get_cmap", "(", "name", ")", "(", "_normalize", "(", "mat", ")", ")", "[", ":", ",", ":", ",", ":", "3", "]", "*", "255", ")", ".", "astype", "(", "np", ".", "uint8", ")", "return", "apply_map", "if", "mat", "is", "None", "else", "apply_map", "(", "mat", ")" ]
returns an RGB matrix scaled by a matplotlib color map
[ "returns", "an", "RGB", "matrix", "scaled", "by", "a", "matplotlib", "color", "map" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/extras.py#L26-L31
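An illustrative call pattern for apply_color_map above (assumes the import path pyparadigm.extras from this row and a matplotlib version that still provides cm.get_cmap):

import numpy as np
from pyparadigm.extras import apply_color_map   # import path inferred from the row above

mat = np.random.rand(4, 5)
img = apply_color_map("viridis", mat)       # direct call: (4, 5, 3) uint8 RGB image
colorize = apply_color_map("viridis")       # mat omitted: returns a function to apply later
assert np.array_equal(img, colorize(mat))   # both forms produce the same image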
KnorrFG/pyparadigm
pyparadigm/extras.py
mat_to_surface
def mat_to_surface(mat: np.ndarray, transformer=to_24bit_gray): """Can be used to create a pygame.Surface from a 2d numpy array. By default a grey image with scaled colors is returned, but using the transformer argument any transformation can be used. :param mat: the matrix to create the surface of. :type mat: np.ndarray :param transformer: function that transforms the matrix to a valid color matrix, i.e. it must have 3dimension, were the 3rd dimension are the color channels. For each channel a value between 0 and 255 is allowed :type transformer: Callable[np.ndarray[np.ndarray]]""" return pygame.pixelcopy.make_surface(transformer(mat.transpose()) if transformer is not None else mat.transpose())
python
def mat_to_surface(mat: np.ndarray, transformer=to_24bit_gray): """Can be used to create a pygame.Surface from a 2d numpy array. By default a grey image with scaled colors is returned, but using the transformer argument any transformation can be used. :param mat: the matrix to create the surface of. :type mat: np.ndarray :param transformer: function that transforms the matrix to a valid color matrix, i.e. it must have 3dimension, were the 3rd dimension are the color channels. For each channel a value between 0 and 255 is allowed :type transformer: Callable[np.ndarray[np.ndarray]]""" return pygame.pixelcopy.make_surface(transformer(mat.transpose()) if transformer is not None else mat.transpose())
[ "def", "mat_to_surface", "(", "mat", ":", "np", ".", "ndarray", ",", "transformer", "=", "to_24bit_gray", ")", ":", "return", "pygame", ".", "pixelcopy", ".", "make_surface", "(", "transformer", "(", "mat", ".", "transpose", "(", ")", ")", "if", "transformer", "is", "not", "None", "else", "mat", ".", "transpose", "(", ")", ")" ]
Can be used to create a pygame.Surface from a 2d numpy array. By default a grey image with scaled colors is returned, but using the transformer argument any transformation can be used. :param mat: the matrix to create the surface of. :type mat: np.ndarray :param transformer: function that transforms the matrix to a valid color matrix, i.e. it must have 3dimension, were the 3rd dimension are the color channels. For each channel a value between 0 and 255 is allowed :type transformer: Callable[np.ndarray[np.ndarray]]
[ "Can", "be", "used", "to", "create", "a", "pygame", ".", "Surface", "from", "a", "2d", "numpy", "array", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/extras.py#L34-L49
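A minimal sketch of mat_to_surface above (illustrative; assumes pygame is installed and that the function is importable from pyparadigm.extras as the file path suggests):

import numpy as np
import pygame
from pyparadigm.extras import mat_to_surface

pygame.init()
mat = np.random.rand(120, 160)   # 120 rows, 160 columns
surface = mat_to_surface(mat)    # grayscale via to_24bit_gray by default
print(surface.get_size())        # (160, 120): the transpose maps columns to x and rows to y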
BlackEarth/bl
bl/id.py
random_id
def random_id(length=16, charset=alphanum_chars, first_charset=alpha_chars, sep='', group=0): """Creates a random id with the given length and charset. ## Parameters * length the number of characters in the id * charset what character set to use (a list of characters) * first_charset what character set for the first character * sep='' what character to insert between groups * group=0 how long the groups are (default 0 means no groups) """ t = [] first_chars = list(set(charset).intersection(first_charset)) if len(first_chars) == 0: first_chars = charset t.append(first_chars[random.randrange(len(first_chars))]) for i in range(len(t), length): if (group > 0) and (i % group == 0) and (i < length): t.append(sep) t.append(charset[random.randrange(len(charset))]) return ''.join(t)
python
def random_id(length=16, charset=alphanum_chars, first_charset=alpha_chars, sep='', group=0): """Creates a random id with the given length and charset. ## Parameters * length the number of characters in the id * charset what character set to use (a list of characters) * first_charset what character set for the first character * sep='' what character to insert between groups * group=0 how long the groups are (default 0 means no groups) """ t = [] first_chars = list(set(charset).intersection(first_charset)) if len(first_chars) == 0: first_chars = charset t.append(first_chars[random.randrange(len(first_chars))]) for i in range(len(t), length): if (group > 0) and (i % group == 0) and (i < length): t.append(sep) t.append(charset[random.randrange(len(charset))]) return ''.join(t)
[ "def", "random_id", "(", "length", "=", "16", ",", "charset", "=", "alphanum_chars", ",", "first_charset", "=", "alpha_chars", ",", "sep", "=", "''", ",", "group", "=", "0", ")", ":", "t", "=", "[", "]", "first_chars", "=", "list", "(", "set", "(", "charset", ")", ".", "intersection", "(", "first_charset", ")", ")", "if", "len", "(", "first_chars", ")", "==", "0", ":", "first_chars", "=", "charset", "t", ".", "append", "(", "first_chars", "[", "random", ".", "randrange", "(", "len", "(", "first_chars", ")", ")", "]", ")", "for", "i", "in", "range", "(", "len", "(", "t", ")", ",", "length", ")", ":", "if", "(", "group", ">", "0", ")", "and", "(", "i", "%", "group", "==", "0", ")", "and", "(", "i", "<", "length", ")", ":", "t", ".", "append", "(", "sep", ")", "t", ".", "append", "(", "charset", "[", "random", ".", "randrange", "(", "len", "(", "charset", ")", ")", "]", ")", "return", "''", ".", "join", "(", "t", ")" ]
Creates a random id with the given length and charset. ## Parameters * length the number of characters in the id * charset what character set to use (a list of characters) * first_charset what character set for the first character * sep='' what character to insert between groups * group=0 how long the groups are (default 0 means no groups)
[ "Creates", "a", "random", "id", "with", "the", "given", "length", "and", "charset", ".", "##", "Parameters", "*", "length", "the", "number", "of", "characters", "in", "the", "id", "*", "charset", "what", "character", "set", "to", "use", "(", "a", "list", "of", "characters", ")", "*", "first_charset", "what", "character", "set", "for", "the", "first", "character", "*", "sep", "=", "what", "character", "to", "insert", "between", "groups", "*", "group", "=", "0", "how", "long", "the", "groups", "are", "(", "default", "0", "means", "no", "groups", ")" ]
train
https://github.com/BlackEarth/bl/blob/edf6f37dac718987260b90ad0e7f7fe084a7c1a3/bl/id.py#L60-L82
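A usage sketch for random_id above (illustrative; outputs are random, and the import path is inferred from the row's file path bl/id.py):

from bl.id import random_id

print(random_id())                              # e.g. 'q3VdL9xRwTz0aZ1r': 16 chars, first is a letter
print(random_id(length=12, group=4, sep='-'))   # e.g. 'aQ3d-9fG2-kX7p': groups of 4 separated by '-'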
michaeltcoelho/pagarme.py
pagarme/common/util.py
merge_dict
def merge_dict(data, *args): """Merge any number of dictionaries """ results = {} for current in (data,) + args: results.update(current) return results
python
def merge_dict(data, *args): """Merge any number of dictionaries """ results = {} for current in (data,) + args: results.update(current) return results
[ "def", "merge_dict", "(", "data", ",", "*", "args", ")", ":", "results", "=", "{", "}", "for", "current", "in", "(", "data", ",", ")", "+", "args", ":", "results", ".", "update", "(", "current", ")", "return", "results" ]
Merge any number of dictionaries
[ "Merge", "any", "number", "of", "dictionaries" ]
train
https://github.com/michaeltcoelho/pagarme.py/blob/469fdd6e61e7c24a9eaf23d474d25316c3b5450b/pagarme/common/util.py#L5-L11
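A short sketch of merge_dict above (the function is copied from the row; later dictionaries overwrite earlier keys):

def merge_dict(data, *args):
    """Merge any number of dictionaries """
    results = {}
    for current in (data,) + args:
        results.update(current)
    return results

print(merge_dict({"a": 1, "b": 2}, {"b": 3}, {"c": 4}))   # {'a': 1, 'b': 3, 'c': 4}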
michaeltcoelho/pagarme.py
pagarme/common/util.py
make_url
def make_url(url, *paths): """Joins individual URL strings together, and returns a single string. """ for path in paths: url = re.sub(r'/?$', re.sub(r'^/?', '/', path), url) return url
python
def make_url(url, *paths): """Joins individual URL strings together, and returns a single string. """ for path in paths: url = re.sub(r'/?$', re.sub(r'^/?', '/', path), url) return url
[ "def", "make_url", "(", "url", ",", "*", "paths", ")", ":", "for", "path", "in", "paths", ":", "url", "=", "re", ".", "sub", "(", "r'/?$'", ",", "re", ".", "sub", "(", "r'^/?'", ",", "'/'", ",", "path", ")", ",", "url", ")", "return", "url" ]
Joins individual URL strings together, and returns a single string.
[ "Joins", "individual", "URL", "strings", "together", "and", "returns", "a", "single", "string", "." ]
train
https://github.com/michaeltcoelho/pagarme.py/blob/469fdd6e61e7c24a9eaf23d474d25316c3b5450b/pagarme/common/util.py#L14-L19
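A short sketch of make_url above (the function is copied from the row; the base URL is made up):

import re

def make_url(url, *paths):
    """Joins individual URL strings together, and returns a single string. """
    for path in paths:
        url = re.sub(r'/?$', re.sub(r'^/?', '/', path), url)
    return url

print(make_url('https://api.example.com', 'v1', 'transactions'))
# https://api.example.com/v1/transactions -- each path piece may be given with or without a leading slash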
Eyepea/tanto
monitoring_agent/outputs/email.py
Email.aggregate_result
def aggregate_result(self, return_code, output, service_description='', specific_servers=None): ''' aggregate result ''' if specific_servers == None: specific_servers = self.servers else: specific_servers = set(self.servers).intersection(specific_servers) for server in specific_servers: if not self.servers[server]['send_errors_only'] or return_code > 0: self.servers[server]['results'].append({'return_code': return_code, 'output': output, 'service_description': service_description, 'return_status': STATUSES[return_code][0], 'custom_fqdn': self.servers[server]['custom_fqdn']}) LOG.info("[email][%s][%s]: Aggregate result: %r", service_description, server, self.servers[server]['results'][-1])
python
def aggregate_result(self, return_code, output, service_description='', specific_servers=None): ''' aggregate result ''' if specific_servers == None: specific_servers = self.servers else: specific_servers = set(self.servers).intersection(specific_servers) for server in specific_servers: if not self.servers[server]['send_errors_only'] or return_code > 0: self.servers[server]['results'].append({'return_code': return_code, 'output': output, 'service_description': service_description, 'return_status': STATUSES[return_code][0], 'custom_fqdn': self.servers[server]['custom_fqdn']}) LOG.info("[email][%s][%s]: Aggregate result: %r", service_description, server, self.servers[server]['results'][-1])
[ "def", "aggregate_result", "(", "self", ",", "return_code", ",", "output", ",", "service_description", "=", "''", ",", "specific_servers", "=", "None", ")", ":", "if", "specific_servers", "==", "None", ":", "specific_servers", "=", "self", ".", "servers", "else", ":", "specific_servers", "=", "set", "(", "self", ".", "servers", ")", ".", "intersection", "(", "specific_servers", ")", "for", "server", "in", "specific_servers", ":", "if", "not", "self", ".", "servers", "[", "server", "]", "[", "'send_errors_only'", "]", "or", "return_code", ">", "0", ":", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", ".", "append", "(", "{", "'return_code'", ":", "return_code", ",", "'output'", ":", "output", ",", "'service_description'", ":", "service_description", ",", "'return_status'", ":", "STATUSES", "[", "return_code", "]", "[", "0", "]", ",", "'custom_fqdn'", ":", "self", ".", "servers", "[", "server", "]", "[", "'custom_fqdn'", "]", "}", ")", "LOG", ".", "info", "(", "\"[email][%s][%s]: Aggregate result: %r\"", ",", "service_description", ",", "server", ",", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", "[", "-", "1", "]", ")" ]
aggregate result
[ "aggregate", "result" ]
train
https://github.com/Eyepea/tanto/blob/ad8fd32e0fd3b7bc3dee5dabb984a2567ae46fe9/monitoring_agent/outputs/email.py#L35-L51
Eyepea/tanto
monitoring_agent/outputs/email.py
Email.send_results
def send_results(self): ''' send results ''' for server in self.servers: if self.servers[server]['results']: if len(self.servers[server]['results']) == 1: msg = MIMEText('') msg['Subject'] = '[%(custom_fqdn)s] [%(service_description)s] %(return_status)s: %(output)s' % self.servers[server]['results'][0] else: txt = '' summary = [0, 0, 0, 0] for results in self.servers[server]['results']: txt += '[%(service_description)s] %(return_status)s: %(output)s\n' % results summary[results['return_code']] += 1 msg = MIMEText(txt) subject = '[%(custom_fqdn)s]' % self.servers[server]['results'][0] for i, status in enumerate(STATUSES): subject += ' %s:%s' % (status[0], summary[i]) msg['Subject'] = subject msg['From'] = self.servers[server]['from'] msg['To'] = ', '.join(self.servers[server]['to']) if self.servers[server]['tls']: smtp_server = smtplib.SMTP_SSL(self.servers[server]['host'], self.servers[server]['port']) else: smtp_server = smtplib.SMTP(self.servers[server]['host'], self.servers[server]['port']) if self.servers[server]['login'] and len(self.servers[server]['login']) > 0: smtp_server.login(self.servers[server]['login'], self.servers[server]['password']) smtp_server.sendmail(self.servers[server]['from'], self.servers[server]['to'], msg.as_string()) smtp_server.quit() LOG.info("[email][%s]: e-mail sent from: %s to: %s", server, self.servers[server]['from'], self.servers[server]['to'])
python
def send_results(self): ''' send results ''' for server in self.servers: if self.servers[server]['results']: if len(self.servers[server]['results']) == 1: msg = MIMEText('') msg['Subject'] = '[%(custom_fqdn)s] [%(service_description)s] %(return_status)s: %(output)s' % self.servers[server]['results'][0] else: txt = '' summary = [0, 0, 0, 0] for results in self.servers[server]['results']: txt += '[%(service_description)s] %(return_status)s: %(output)s\n' % results summary[results['return_code']] += 1 msg = MIMEText(txt) subject = '[%(custom_fqdn)s]' % self.servers[server]['results'][0] for i, status in enumerate(STATUSES): subject += ' %s:%s' % (status[0], summary[i]) msg['Subject'] = subject msg['From'] = self.servers[server]['from'] msg['To'] = ', '.join(self.servers[server]['to']) if self.servers[server]['tls']: smtp_server = smtplib.SMTP_SSL(self.servers[server]['host'], self.servers[server]['port']) else: smtp_server = smtplib.SMTP(self.servers[server]['host'], self.servers[server]['port']) if self.servers[server]['login'] and len(self.servers[server]['login']) > 0: smtp_server.login(self.servers[server]['login'], self.servers[server]['password']) smtp_server.sendmail(self.servers[server]['from'], self.servers[server]['to'], msg.as_string()) smtp_server.quit() LOG.info("[email][%s]: e-mail sent from: %s to: %s", server, self.servers[server]['from'], self.servers[server]['to'])
[ "def", "send_results", "(", "self", ")", ":", "for", "server", "in", "self", ".", "servers", ":", "if", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", ":", "if", "len", "(", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", ")", "==", "1", ":", "msg", "=", "MIMEText", "(", "''", ")", "msg", "[", "'Subject'", "]", "=", "'[%(custom_fqdn)s] [%(service_description)s] %(return_status)s: %(output)s'", "%", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", "[", "0", "]", "else", ":", "txt", "=", "''", "summary", "=", "[", "0", ",", "0", ",", "0", ",", "0", "]", "for", "results", "in", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", ":", "txt", "+=", "'[%(service_description)s] %(return_status)s: %(output)s\\n'", "%", "results", "summary", "[", "results", "[", "'return_code'", "]", "]", "+=", "1", "msg", "=", "MIMEText", "(", "txt", ")", "subject", "=", "'[%(custom_fqdn)s]'", "%", "self", ".", "servers", "[", "server", "]", "[", "'results'", "]", "[", "0", "]", "for", "i", ",", "status", "in", "enumerate", "(", "STATUSES", ")", ":", "subject", "+=", "' %s:%s'", "%", "(", "status", "[", "0", "]", ",", "summary", "[", "i", "]", ")", "msg", "[", "'Subject'", "]", "=", "subject", "msg", "[", "'From'", "]", "=", "self", ".", "servers", "[", "server", "]", "[", "'from'", "]", "msg", "[", "'To'", "]", "=", "', '", ".", "join", "(", "self", ".", "servers", "[", "server", "]", "[", "'to'", "]", ")", "if", "self", ".", "servers", "[", "server", "]", "[", "'tls'", "]", ":", "smtp_server", "=", "smtplib", ".", "SMTP_SSL", "(", "self", ".", "servers", "[", "server", "]", "[", "'host'", "]", ",", "self", ".", "servers", "[", "server", "]", "[", "'port'", "]", ")", "else", ":", "smtp_server", "=", "smtplib", ".", "SMTP", "(", "self", ".", "servers", "[", "server", "]", "[", "'host'", "]", ",", "self", ".", "servers", "[", "server", "]", "[", "'port'", "]", ")", "if", "self", ".", "servers", "[", "server", "]", "[", "'login'", "]", "and", "len", "(", "self", ".", "servers", "[", "server", "]", "[", "'login'", "]", ")", ">", "0", ":", "smtp_server", ".", "login", "(", "self", ".", "servers", "[", "server", "]", "[", "'login'", "]", ",", "self", ".", "servers", "[", "server", "]", "[", "'password'", "]", ")", "smtp_server", ".", "sendmail", "(", "self", ".", "servers", "[", "server", "]", "[", "'from'", "]", ",", "self", ".", "servers", "[", "server", "]", "[", "'to'", "]", ",", "msg", ".", "as_string", "(", ")", ")", "smtp_server", ".", "quit", "(", ")", "LOG", ".", "info", "(", "\"[email][%s]: e-mail sent from: %s to: %s\"", ",", "server", ",", "self", ".", "servers", "[", "server", "]", "[", "'from'", "]", ",", "self", ".", "servers", "[", "server", "]", "[", "'to'", "]", ")" ]
send results
[ "send", "results" ]
train
https://github.com/Eyepea/tanto/blob/ad8fd32e0fd3b7bc3dee5dabb984a2567ae46fe9/monitoring_agent/outputs/email.py#L53-L86
RobotStudio/bors
example.py
main
def main(): """MAIN""" config = { "api": { "services": [ { "name": "my_api", "testkey": "testval", }, ], "calls": { "hello_world": { "delay": 5, "priority": 1, "arguments": None, }, "marco": { "delay": 1, "priority": 1, }, "pollo": { "delay": 1, "priority": 1, }, } } } app = AppBuilder([MyAPI], Strategy(Print()), AppConf(config)) app.run()
python
def main(): """MAIN""" config = { "api": { "services": [ { "name": "my_api", "testkey": "testval", }, ], "calls": { "hello_world": { "delay": 5, "priority": 1, "arguments": None, }, "marco": { "delay": 1, "priority": 1, }, "pollo": { "delay": 1, "priority": 1, }, } } } app = AppBuilder([MyAPI], Strategy(Print()), AppConf(config)) app.run()
[ "def", "main", "(", ")", ":", "config", "=", "{", "\"api\"", ":", "{", "\"services\"", ":", "[", "{", "\"name\"", ":", "\"my_api\"", ",", "\"testkey\"", ":", "\"testval\"", ",", "}", ",", "]", ",", "\"calls\"", ":", "{", "\"hello_world\"", ":", "{", "\"delay\"", ":", "5", ",", "\"priority\"", ":", "1", ",", "\"arguments\"", ":", "None", ",", "}", ",", "\"marco\"", ":", "{", "\"delay\"", ":", "1", ",", "\"priority\"", ":", "1", ",", "}", ",", "\"pollo\"", ":", "{", "\"delay\"", ":", "1", ",", "\"priority\"", ":", "1", ",", "}", ",", "}", "}", "}", "app", "=", "AppBuilder", "(", "[", "MyAPI", "]", ",", "Strategy", "(", "Print", "(", ")", ")", ",", "AppConf", "(", "config", ")", ")", "app", ".", "run", "(", ")" ]
MAIN
[ "MAIN" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/example.py#L104-L132
RobotStudio/bors
example.py
RequestSchema.make_request
def make_request(self, data): """Parse the outgoing schema""" sch = MockItemSchema() return Request(**{ "callname": self.context.get("callname"), "payload": sch.dump(data), })
python
def make_request(self, data): """Parse the outgoing schema""" sch = MockItemSchema() return Request(**{ "callname": self.context.get("callname"), "payload": sch.dump(data), })
[ "def", "make_request", "(", "self", ",", "data", ")", ":", "sch", "=", "MockItemSchema", "(", ")", "return", "Request", "(", "*", "*", "{", "\"callname\"", ":", "self", ".", "context", ".", "get", "(", "\"callname\"", ")", ",", "\"payload\"", ":", "sch", ".", "dump", "(", "data", ")", ",", "}", ")" ]
Parse the outgoing schema
[ "Parse", "the", "outgoing", "schema" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/example.py#L58-L64
RobotStudio/bors
example.py
ResponseSchema.populate_data
def populate_data(self, data): """Parse the outgoing schema""" sch = MockItemSchema() return Result(**{ "callname": self.context.get("callname"), "result": sch.dump(data), })
python
def populate_data(self, data): """Parse the outgoing schema""" sch = MockItemSchema() return Result(**{ "callname": self.context.get("callname"), "result": sch.dump(data), })
[ "def", "populate_data", "(", "self", ",", "data", ")", ":", "sch", "=", "MockItemSchema", "(", ")", "return", "Result", "(", "*", "*", "{", "\"callname\"", ":", "self", ".", "context", ".", "get", "(", "\"callname\"", ")", ",", "\"result\"", ":", "sch", ".", "dump", "(", "data", ")", ",", "}", ")" ]
Parse the outgoing schema
[ "Parse", "the", "outgoing", "schema" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/example.py#L74-L80
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
Handler.key_press
def key_press(keys): """returns a handler that can be used with EventListener.listen() and returns when a key in keys is pressed""" return lambda e: e.key if e.type == pygame.KEYDOWN \ and e.key in keys else EventConsumerInfo.DONT_CARE
python
def key_press(keys): """returns a handler that can be used with EventListener.listen() and returns when a key in keys is pressed""" return lambda e: e.key if e.type == pygame.KEYDOWN \ and e.key in keys else EventConsumerInfo.DONT_CARE
[ "def", "key_press", "(", "keys", ")", ":", "return", "lambda", "e", ":", "e", ".", "key", "if", "e", ".", "type", "==", "pygame", ".", "KEYDOWN", "and", "e", ".", "key", "in", "keys", "else", "EventConsumerInfo", ".", "DONT_CARE" ]
returns a handler that can be used with EventListener.listen() and returns when a key in keys is pressed
[ "returns", "a", "handler", "that", "can", "be", "used", "with", "EventListener", ".", "listen", "()", "and", "returns", "when", "a", "key", "in", "keys", "is", "pressed" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L70-L74
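An illustrative check of the Handler.key_press contract above (assumes pyparadigm.eventlistener exposes Handler and EventConsumerInfo, as the code suggests):

import pygame
from pyparadigm.eventlistener import Handler, EventConsumerInfo

pygame.init()
handler = Handler.key_press([pygame.K_RETURN, pygame.K_ESCAPE])
enter = pygame.event.Event(pygame.KEYDOWN, key=pygame.K_RETURN)
other = pygame.event.Event(pygame.KEYDOWN, key=pygame.K_a)
print(handler(enter) == pygame.K_RETURN)              # True: a listed key is returned
print(handler(other) == EventConsumerInfo.DONT_CARE)  # True: anything else is passed over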
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
Handler.unicode_char
def unicode_char(ignored_chars=None): """returns a handler that listens for unicode characters""" return lambda e: e.unicode if e.type == pygame.KEYDOWN \ and ((ignored_chars is None) or (e.unicode not in ignored_chars))\ else EventConsumerInfo.DONT_CARE
python
def unicode_char(ignored_chars=None): """returns a handler that listens for unicode characters""" return lambda e: e.unicode if e.type == pygame.KEYDOWN \ and ((ignored_chars is None) or (e.unicode not in ignored_chars))\ else EventConsumerInfo.DONT_CARE
[ "def", "unicode_char", "(", "ignored_chars", "=", "None", ")", ":", "return", "lambda", "e", ":", "e", ".", "unicode", "if", "e", ".", "type", "==", "pygame", ".", "KEYDOWN", "and", "(", "(", "ignored_chars", "is", "None", ")", "or", "(", "e", ".", "unicode", "not", "in", "ignored_chars", ")", ")", "else", "EventConsumerInfo", ".", "DONT_CARE" ]
returns a handler that listens for unicode characters
[ "returns", "a", "handler", "that", "listens", "for", "unicode", "characters" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L77-L82
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.mouse_area
def mouse_area(self, handler, group=0, ident=None): """Adds a new MouseProxy for the given group to the EventListener.mouse_proxies dict if it is not in there yet, and returns the (new) MouseProxy. In listen() all entries in the current group of mouse_proxies are used.""" key = ident or id(handler) if key not in self.mouse_proxies[group]: self.mouse_proxies[group][key] = MouseProxy(handler, ident) return self.mouse_proxies[group][key]
python
def mouse_area(self, handler, group=0, ident=None): """Adds a new MouseProxy for the given group to the EventListener.mouse_proxies dict if it is not in there yet, and returns the (new) MouseProxy. In listen() all entries in the current group of mouse_proxies are used.""" key = ident or id(handler) if key not in self.mouse_proxies[group]: self.mouse_proxies[group][key] = MouseProxy(handler, ident) return self.mouse_proxies[group][key]
[ "def", "mouse_area", "(", "self", ",", "handler", ",", "group", "=", "0", ",", "ident", "=", "None", ")", ":", "key", "=", "ident", "or", "id", "(", "handler", ")", "if", "key", "not", "in", "self", ".", "mouse_proxies", "[", "group", "]", ":", "self", ".", "mouse_proxies", "[", "group", "]", "[", "key", "]", "=", "MouseProxy", "(", "handler", ",", "ident", ")", "return", "self", ".", "mouse_proxies", "[", "group", "]", "[", "key", "]" ]
Adds a new MouseProxy for the given group to the EventListener.mouse_proxies dict if it is not in there yet, and returns the (new) MouseProxy. In listen() all entries in the current group of mouse_proxies are used.
[ "Adds", "a", "new", "MouseProxy", "for", "the", "given", "group", "to", "the", "EventListener", ".", "mouse_proxies", "dict", "if", "it", "is", "not", "in", "there", "yet", "and", "returns", "the", "(", "new", ")", "MouseProxy", ".", "In", "listen", "()", "all", "entries", "in", "the", "current", "group", "of", "mouse_proxies", "are", "used", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L167-L175
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.listen
def listen(self, *temporary_handlers): """When listen() is called all queued pygame.Events will be passed to all registered listeners. There are two ways to register a listener: 1. as a permanent listener, that is always executed for every event. These are registered by passing the handler-functions during construction 2. as a temporary listener, that will only be executed during the current call to listen(). These are registered by passing the handler functions as arguments to listen() When a handler is called it can provoke three different reactions through its return value. 1. It can return EventConsumerInfo.DONT_CARE in which case the EventListener will pass the event to the next handler in line, or go to the next event, if the last handler was called. 2. It can return EventConsumerInfo.CONSUMED in which case the event will not be passed to following handlers, and the next event in line will be processed. 3. It can return anything else (including None, which will be returned if no return value is specified) in this case the listen()-method will return the result of the handler. Therefore all permanent handlers should usually return EventConsumerInfo.DONT_CARE """ funcs = tuple(itt.chain(self.permanent_handlers, (proxy.listener for proxy in self.mouse_proxies[self.proxy_group].values()), temporary_handlers)) for event in self._get_q(): for func in funcs: ret = func(event) if ret == EventConsumerInfo.CONSUMED: break if ret == EventConsumerInfo.DONT_CARE: continue else: return ret
python
def listen(self, *temporary_handlers): """When listen() is called all queued pygame.Events will be passed to all registered listeners. There are two ways to register a listener: 1. as a permanent listener, that is always executed for every event. These are registered by passing the handler-functions during construction 2. as a temporary listener, that will only be executed during the current call to listen(). These are registered by passing the handler functions as arguments to listen() When a handler is called it can provoke three different reactions through its return value. 1. It can return EventConsumerInfo.DONT_CARE in which case the EventListener will pass the event to the next handler in line, or go to the next event, if the last handler was called. 2. It can return EventConsumerInfo.CONSUMED in which case the event will not be passed to following handlers, and the next event in line will be processed. 3. It can return anything else (including None, which will be returned if no return value is specified) in this case the listen()-method will return the result of the handler. Therefore all permanent handlers should usually return EventConsumerInfo.DONT_CARE """ funcs = tuple(itt.chain(self.permanent_handlers, (proxy.listener for proxy in self.mouse_proxies[self.proxy_group].values()), temporary_handlers)) for event in self._get_q(): for func in funcs: ret = func(event) if ret == EventConsumerInfo.CONSUMED: break if ret == EventConsumerInfo.DONT_CARE: continue else: return ret
[ "def", "listen", "(", "self", ",", "*", "temporary_handlers", ")", ":", "funcs", "=", "tuple", "(", "itt", ".", "chain", "(", "self", ".", "permanent_handlers", ",", "(", "proxy", ".", "listener", "for", "proxy", "in", "self", ".", "mouse_proxies", "[", "self", ".", "proxy_group", "]", ".", "values", "(", ")", ")", ",", "temporary_handlers", ")", ")", "for", "event", "in", "self", ".", "_get_q", "(", ")", ":", "for", "func", "in", "funcs", ":", "ret", "=", "func", "(", "event", ")", "if", "ret", "==", "EventConsumerInfo", ".", "CONSUMED", ":", "break", "if", "ret", "==", "EventConsumerInfo", ".", "DONT_CARE", ":", "continue", "else", ":", "return", "ret" ]
When listen() is called all queued pygame.Events will be passed to all registered listeners. There are two ways to register a listener: 1. as a permanent listener, that is always executed for every event. These are registered by passing the handler-functions during construction 2. as a temporary listener, that will only be executed during the current call to listen(). These are registered by passing the handler functions as arguments to listen() When a handler is called it can provoke three different reactions through its return value. 1. It can return EventConsumerInfo.DONT_CARE in which case the EventListener will pass the event to the next handler in line, or go to the next event, if the last handler was called. 2. It can return EventConsumerInfo.CONSUMED in which case the event will not be passed to following handlers, and the next event in line will be processed. 3. It can return anything else (including None, which will be returned if no return value is specified) in this case the listen()-method will return the result of the handler. Therefore all permanent handlers should usually return EventConsumerInfo.DONT_CARE
[ "When", "listen", "()", "is", "called", "all", "queued", "pygame", ".", "Events", "will", "be", "passed", "to", "all", "registered", "listeners", ".", "There", "are", "two", "ways", "to", "register", "a", "listener", ":" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L183-L225
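A minimal sketch of the listen() protocol described above (illustrative; assumes EventListener can be constructed without permanent handlers and that a pygame display exists for the event queue):

import pygame
from pyparadigm.eventlistener import EventListener, EventConsumerInfo

pygame.init()
pygame.display.set_mode((320, 240))
listener = EventListener()                      # assumed default construction

def quit_handler(event):
    if event.type == pygame.QUIT:
        return "quit"                           # listen() returns this value to the caller
    return EventConsumerInfo.DONT_CARE          # otherwise keep passing events along

result = listener.listen(quit_handler)          # handles only the events queued right now
print(result)                                   # "quit" if a QUIT event was waiting, else None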
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.listen_until_return
def listen_until_return(self, *temporary_handlers, timeout=0): """Calls listen repeatedly until listen returns something else than None. Then returns listen's result. If timeout is not zero listen_until_return stops after timeout seconds and returns None.""" start = time.time() while timeout == 0 or time.time() - start < timeout: res = self.listen(*temporary_handlers) if res is not None: return res
python
def listen_until_return(self, *temporary_handlers, timeout=0): """Calls listen repeatedly until listen returns something else than None. Then returns listen's result. If timeout is not zero listen_until_return stops after timeout seconds and returns None.""" start = time.time() while timeout == 0 or time.time() - start < timeout: res = self.listen(*temporary_handlers) if res is not None: return res
[ "def", "listen_until_return", "(", "self", ",", "*", "temporary_handlers", ",", "timeout", "=", "0", ")", ":", "start", "=", "time", ".", "time", "(", ")", "while", "timeout", "==", "0", "or", "time", ".", "time", "(", ")", "-", "start", "<", "timeout", ":", "res", "=", "self", ".", "listen", "(", "*", "temporary_handlers", ")", "if", "res", "is", "not", "None", ":", "return", "res" ]
Calls listen repeatedly until listen returns something else than None. Then returns listen's result. If timeout is not zero listen_until_return stops after timeout seconds and returns None.
[ "Calls", "listen", "repeatedly", "until", "listen", "returns", "something", "else", "than", "None", ".", "Then", "returns", "listen", "s", "result", ".", "If", "timeout", "is", "not", "zero", "listen_until_return", "stops", "after", "timeout", "seconds", "and", "returns", "None", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L227-L235
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.wait_for_n_keypresses
def wait_for_n_keypresses(self, key, n=1): """Waits till one key was pressed n times. :param key: the key to be pressed as defined by pygame. E.g. pygame.K_LEFT for the left arrow key :type key: int :param n: number of repetitions till the function returns :type n: int """ my_const = "key_consumed" counter = 0 def keypress_listener(e): return my_const \ if e.type == pygame.KEYDOWN and e.key == key \ else EventConsumerInfo.DONT_CARE while counter < n: if self.listen(keypress_listener) == my_const: counter += 1
python
def wait_for_n_keypresses(self, key, n=1): """Waits till one key was pressed n times. :param key: the key to be pressed as defined by pygame. E.g. pygame.K_LEFT for the left arrow key :type key: int :param n: number of repetitions till the function returns :type n: int """ my_const = "key_consumed" counter = 0 def keypress_listener(e): return my_const \ if e.type == pygame.KEYDOWN and e.key == key \ else EventConsumerInfo.DONT_CARE while counter < n: if self.listen(keypress_listener) == my_const: counter += 1
[ "def", "wait_for_n_keypresses", "(", "self", ",", "key", ",", "n", "=", "1", ")", ":", "my_const", "=", "\"key_consumed\"", "counter", "=", "0", "def", "keypress_listener", "(", "e", ")", ":", "return", "my_const", "if", "e", ".", "type", "==", "pygame", ".", "KEYDOWN", "and", "e", ".", "key", "==", "key", "else", "EventConsumerInfo", ".", "DONT_CARE", "while", "counter", "<", "n", ":", "if", "self", ".", "listen", "(", "keypress_listener", ")", "==", "my_const", ":", "counter", "+=", "1" ]
Waits till one key was pressed n times. :param key: the key to be pressed as defined by pygame. E.g. pygame.K_LEFT for the left arrow key :type key: int :param n: number of repetitions till the function returns :type n: int
[ "Waits", "till", "one", "key", "was", "pressed", "n", "times", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L237-L255
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.wait_for_keys
def wait_for_keys(self, *keys, timeout=0): """Waits until one of the specified keys was pressed, and returns which key was pressed. :param keys: iterable of integers of pygame-keycodes, or simply multiple keys passed via multiple arguments :type keys: iterable :param timeout: number of seconds to wait till the function returns :type timeout: float :returns: The keycode of the pressed key, or None in case of timeout :rtype: int """ if len(keys) == 1 and _is_iterable(keys[0]): keys = keys[0] return self.listen_until_return(Handler.key_press(keys), timeout=timeout)
python
def wait_for_keys(self, *keys, timeout=0): """Waits until one of the specified keys was pressed, and returns which key was pressed. :param keys: iterable of integers of pygame-keycodes, or simply multiple keys passed via multiple arguments :type keys: iterable :param timeout: number of seconds to wait till the function returns :type timeout: float :returns: The keycode of the pressed key, or None in case of timeout :rtype: int """ if len(keys) == 1 and _is_iterable(keys[0]): keys = keys[0] return self.listen_until_return(Handler.key_press(keys), timeout=timeout)
[ "def", "wait_for_keys", "(", "self", ",", "*", "keys", ",", "timeout", "=", "0", ")", ":", "if", "len", "(", "keys", ")", "==", "1", "and", "_is_iterable", "(", "keys", "[", "0", "]", ")", ":", "keys", "=", "keys", "[", "0", "]", "return", "self", ".", "listen_until_return", "(", "Handler", ".", "key_press", "(", "keys", ")", ",", "timeout", "=", "timeout", ")" ]
Waits until one of the specified keys was pressed, and returns which key was pressed. :param keys: iterable of integers of pygame-keycodes, or simply multiple keys passed via multiple arguments :type keys: iterable :param timeout: number of seconds to wait till the function returns :type timeout: float :returns: The keycode of the pressed key, or None in case of timeout :rtype: int
[ "Waits", "until", "one", "of", "the", "specified", "keys", "was", "pressed", "and", "returns", "which", "key", "was", "pressed", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L257-L273
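An illustrative call to wait_for_keys above (same assumptions as the earlier EventListener sketch about construction and having a display):

import pygame
from pyparadigm.eventlistener import EventListener

pygame.init()
pygame.display.set_mode((320, 240))
listener = EventListener()                      # assumed default construction

key = listener.wait_for_keys(pygame.K_LEFT, pygame.K_RIGHT, timeout=5)
if key is None:
    print("timed out after 5 seconds")
else:
    print("pressed", pygame.key.name(key))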
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.wait_for_keys_modified
def wait_for_keys_modified(self, *keys, modifiers_to_check=_mod_keys, timeout=0): """The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function will check whether they are pressed :type modifiers: Iterable[int]""" set_mods = pygame.key.get_mods() return frozenset.union( frozenset([self.wait_for_keys(*keys, timeout=timeout)]), EventListener._contained_modifiers(set_mods, modifiers_to_check))
python
def wait_for_keys_modified(self, *keys, modifiers_to_check=_mod_keys, timeout=0): """The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function will check whether they are pressed :type modifiers: Iterable[int]""" set_mods = pygame.key.get_mods() return frozenset.union( frozenset([self.wait_for_keys(*keys, timeout=timeout)]), EventListener._contained_modifiers(set_mods, modifiers_to_check))
[ "def", "wait_for_keys_modified", "(", "self", ",", "*", "keys", ",", "modifiers_to_check", "=", "_mod_keys", ",", "timeout", "=", "0", ")", ":", "set_mods", "=", "pygame", ".", "key", ".", "get_mods", "(", ")", "return", "frozenset", ".", "union", "(", "frozenset", "(", "[", "self", ".", "wait_for_keys", "(", "*", "keys", ",", "timeout", "=", "timeout", ")", "]", ")", ",", "EventListener", ".", "_contained_modifiers", "(", "set_mods", ",", "modifiers_to_check", ")", ")" ]
The same as wait_for_keys, but returns a frozen_set which contains the pressed key, and the modifier keys. :param modifiers_to_check: iterable of modifiers for which the function will check whether they are pressed :type modifiers: Iterable[int]
[ "The", "same", "as", "wait_for_keys", "but", "returns", "a", "frozen_set", "which", "contains", "the", "pressed", "key", "and", "the", "modifier", "keys", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L275-L288
KnorrFG/pyparadigm
pyparadigm/eventlistener.py
EventListener.wait_for_unicode_char
def wait_for_unicode_char(self, ignored_chars=None, timeout=0): """Returns a str that contains the single character that was pressed. This already respects modifier keys and keyboard layouts. If timeout is not none and no key is pressed within the specified timeout, None is returned. If a key is ingnored_chars it will be ignored. As argument for irgnored_chars any object that has a __contains__ method can be used, e.g. a string, a set, a list, etc""" return self.listen_until_return(Handler.unicode_char(ignored_chars), timeout=timeout)
python
def wait_for_unicode_char(self, ignored_chars=None, timeout=0): """Returns a str that contains the single character that was pressed. This already respects modifier keys and keyboard layouts. If timeout is not none and no key is pressed within the specified timeout, None is returned. If a key is ingnored_chars it will be ignored. As argument for irgnored_chars any object that has a __contains__ method can be used, e.g. a string, a set, a list, etc""" return self.listen_until_return(Handler.unicode_char(ignored_chars), timeout=timeout)
[ "def", "wait_for_unicode_char", "(", "self", ",", "ignored_chars", "=", "None", ",", "timeout", "=", "0", ")", ":", "return", "self", ".", "listen_until_return", "(", "Handler", ".", "unicode_char", "(", "ignored_chars", ")", ",", "timeout", "=", "timeout", ")" ]
Returns a str that contains the single character that was pressed. This already respects modifier keys and keyboard layouts. If timeout is not none and no key is pressed within the specified timeout, None is returned. If a key is ingnored_chars it will be ignored. As argument for irgnored_chars any object that has a __contains__ method can be used, e.g. a string, a set, a list, etc
[ "Returns", "a", "str", "that", "contains", "the", "single", "character", "that", "was", "pressed", ".", "This", "already", "respects", "modifier", "keys", "and", "keyboard", "layouts", ".", "If", "timeout", "is", "not", "none", "and", "no", "key", "is", "pressed", "within", "the", "specified", "timeout", "None", "is", "returned", ".", "If", "a", "key", "is", "ingnored_chars", "it", "will", "be", "ignored", ".", "As", "argument", "for", "irgnored_chars", "any", "object", "that", "has", "a", "__contains__", "method", "can", "be", "used", "e", ".", "g", ".", "a", "string", "a", "set", "a", "list", "etc" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/eventlistener.py#L295-L303
JohnDoee/thomas
thomas/streamers/rar.py
RarStreamer._find_all_first_files
def _find_all_first_files(self, item): """ Does not support the full range of ways rar can split as it'd require reading the file to ensure you are using the correct way. """ for listed_item in item.list(): new_style = re.findall(r'(?i)\.part(\d+)\.rar^', listed_item.id) if new_style: if int(new_style[0]) == 1: yield 'new', listed_item elif listed_item.id.lower().endswith('.rar'): yield 'old', listed_item
python
def _find_all_first_files(self, item): """ Does not support the full range of ways rar can split as it'd require reading the file to ensure you are using the correct way. """ for listed_item in item.list(): new_style = re.findall(r'(?i)\.part(\d+)\.rar^', listed_item.id) if new_style: if int(new_style[0]) == 1: yield 'new', listed_item elif listed_item.id.lower().endswith('.rar'): yield 'old', listed_item
[ "def", "_find_all_first_files", "(", "self", ",", "item", ")", ":", "for", "listed_item", "in", "item", ".", "list", "(", ")", ":", "new_style", "=", "re", ".", "findall", "(", "r'(?i)\\.part(\\d+)\\.rar^'", ",", "listed_item", ".", "id", ")", "if", "new_style", ":", "if", "int", "(", "new_style", "[", "0", "]", ")", "==", "1", ":", "yield", "'new'", ",", "listed_item", "elif", "listed_item", ".", "id", ".", "lower", "(", ")", ".", "endswith", "(", "'.rar'", ")", ":", "yield", "'old'", ",", "listed_item" ]
Does not support the full range of ways rar can split as it'd require reading the file to ensure you are using the correct way.
[ "Does", "not", "support", "the", "full", "range", "of", "ways", "rar", "can", "split", "as", "it", "d", "require", "reading", "the", "file", "to", "ensure", "you", "are", "using", "the", "correct", "way", "." ]
train
https://github.com/JohnDoee/thomas/blob/51916dd110098b189a1c2fbcb71794fd9ec94832/thomas/streamers/rar.py#L15-L27
20c/facsimile
facsimile/codec.py
find_datafile
def find_datafile(name, search_path, codecs=get_codecs()): """ find all matching data files in search_path search_path: path of directories to load from codecs: allow to override from list of installed returns array of tuples (codec_object, filename) """ return munge.find_datafile(name, search_path, codecs)
python
def find_datafile(name, search_path, codecs=get_codecs()): """ find all matching data files in search_path search_path: path of directories to load from codecs: allow to override from list of installed returns array of tuples (codec_object, filename) """ return munge.find_datafile(name, search_path, codecs)
[ "def", "find_datafile", "(", "name", ",", "search_path", ",", "codecs", "=", "get_codecs", "(", ")", ")", ":", "return", "munge", ".", "find_datafile", "(", "name", ",", "search_path", ",", "codecs", ")" ]
find all matching data files in search_path search_path: path of directories to load from codecs: allow to override from list of installed returns array of tuples (codec_object, filename)
[ "find", "all", "matching", "data", "files", "in", "search_path", "search_path", ":", "path", "of", "directories", "to", "load", "from", "codecs", ":", "allow", "to", "override", "from", "list", "of", "installed", "returns", "array", "of", "tuples", "(", "codec_object", "filename", ")" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/codec.py#L14-L21
20c/facsimile
facsimile/codec.py
load_datafile
def load_datafile(name, search_path, codecs=get_codecs(), **kwargs): """ find datafile and load them from codec TODO only does the first one kwargs: default = if passed will return that on failure instead of throwing """ return munge.load_datafile(name, search_path, codecs, **kwargs)
python
def load_datafile(name, search_path, codecs=get_codecs(), **kwargs): """ find datafile and load them from codec TODO only does the first one kwargs: default = if passed will return that on failure instead of throwing """ return munge.load_datafile(name, search_path, codecs, **kwargs)
[ "def", "load_datafile", "(", "name", ",", "search_path", ",", "codecs", "=", "get_codecs", "(", ")", ",", "*", "*", "kwargs", ")", ":", "return", "munge", ".", "load_datafile", "(", "name", ",", "search_path", ",", "codecs", ",", "*", "*", "kwargs", ")" ]
find datafile and load them from codec TODO only does the first one kwargs: default = if passed will return that on failure instead of throwing
[ "find", "datafile", "and", "load", "them", "from", "codec", "TODO", "only", "does", "the", "first", "one", "kwargs", ":", "default", "=", "if", "passed", "will", "return", "that", "on", "failure", "instead", "of", "throwing" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/codec.py#L23-L30
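An illustrative call combining find_datafile and load_datafile above (the file name and search directories are made up; both functions simply delegate to munge as shown):

from facsimile import codec   # module path taken from the rows above

search_path = [".", "./config"]                        # hypothetical directories to scan
matches = codec.find_datafile("settings", search_path)
print(matches)                                         # list of (codec_object, filename) tuples
data = codec.load_datafile("settings", search_path, default={})
print(data)                                            # {} if no matching data file was found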
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.cred_init
def cred_init( self, *, secrets_dir: str, log: Logger, bot_name: str, ) -> None: """ Initialize what requires credentials/secret files. :param secrets_dir: dir to expect credentials in and store logs/history in. :param log: logger to use for log output. :param bot_name: name of this bot, used for various kinds of labelling. :returns: none. """ super().__init__(secrets_dir=secrets_dir, log=log, bot_name=bot_name) self.ldebug("Retrieving CONSUMER_KEY...") with open(path.join(self.secrets_dir, "CONSUMER_KEY")) as f: CONSUMER_KEY = f.read().strip() self.ldebug("Retrieving CONSUMER_SECRET...") with open(path.join(self.secrets_dir, "CONSUMER_SECRET")) as f: CONSUMER_SECRET = f.read().strip() self.ldebug("Retrieving ACCESS_TOKEN...") with open(path.join(self.secrets_dir, "ACCESS_TOKEN")) as f: ACCESS_TOKEN = f.read().strip() self.ldebug("Retrieving ACCESS_SECRET...") with open(path.join(self.secrets_dir, "ACCESS_SECRET")) as f: ACCESS_SECRET = f.read().strip() self.ldebug("Looking for OWNER_HANDLE...") owner_handle_path = path.join(self.secrets_dir, "OWNER_HANDLE") if path.isfile(owner_handle_path): with open(owner_handle_path) as f: self.owner_handle = f.read().strip() else: self.ldebug("Couldn't find OWNER_HANDLE, unable to DM...") self.owner_handle = "" self.auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) self.auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET) self.api = tweepy.API(self.auth)
python
def cred_init( self, *, secrets_dir: str, log: Logger, bot_name: str, ) -> None: """ Initialize what requires credentials/secret files. :param secrets_dir: dir to expect credentials in and store logs/history in. :param log: logger to use for log output. :param bot_name: name of this bot, used for various kinds of labelling. :returns: none. """ super().__init__(secrets_dir=secrets_dir, log=log, bot_name=bot_name) self.ldebug("Retrieving CONSUMER_KEY...") with open(path.join(self.secrets_dir, "CONSUMER_KEY")) as f: CONSUMER_KEY = f.read().strip() self.ldebug("Retrieving CONSUMER_SECRET...") with open(path.join(self.secrets_dir, "CONSUMER_SECRET")) as f: CONSUMER_SECRET = f.read().strip() self.ldebug("Retrieving ACCESS_TOKEN...") with open(path.join(self.secrets_dir, "ACCESS_TOKEN")) as f: ACCESS_TOKEN = f.read().strip() self.ldebug("Retrieving ACCESS_SECRET...") with open(path.join(self.secrets_dir, "ACCESS_SECRET")) as f: ACCESS_SECRET = f.read().strip() self.ldebug("Looking for OWNER_HANDLE...") owner_handle_path = path.join(self.secrets_dir, "OWNER_HANDLE") if path.isfile(owner_handle_path): with open(owner_handle_path) as f: self.owner_handle = f.read().strip() else: self.ldebug("Couldn't find OWNER_HANDLE, unable to DM...") self.owner_handle = "" self.auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET) self.auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET) self.api = tweepy.API(self.auth)
[ "def", "cred_init", "(", "self", ",", "*", ",", "secrets_dir", ":", "str", ",", "log", ":", "Logger", ",", "bot_name", ":", "str", ",", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", "secrets_dir", "=", "secrets_dir", ",", "log", "=", "log", ",", "bot_name", "=", "bot_name", ")", "self", ".", "ldebug", "(", "\"Retrieving CONSUMER_KEY...\"", ")", "with", "open", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"CONSUMER_KEY\"", ")", ")", "as", "f", ":", "CONSUMER_KEY", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "self", ".", "ldebug", "(", "\"Retrieving CONSUMER_SECRET...\"", ")", "with", "open", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"CONSUMER_SECRET\"", ")", ")", "as", "f", ":", "CONSUMER_SECRET", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "self", ".", "ldebug", "(", "\"Retrieving ACCESS_TOKEN...\"", ")", "with", "open", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"ACCESS_TOKEN\"", ")", ")", "as", "f", ":", "ACCESS_TOKEN", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "self", ".", "ldebug", "(", "\"Retrieving ACCESS_SECRET...\"", ")", "with", "open", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"ACCESS_SECRET\"", ")", ")", "as", "f", ":", "ACCESS_SECRET", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "self", ".", "ldebug", "(", "\"Looking for OWNER_HANDLE...\"", ")", "owner_handle_path", "=", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"OWNER_HANDLE\"", ")", "if", "path", ".", "isfile", "(", "owner_handle_path", ")", ":", "with", "open", "(", "owner_handle_path", ")", "as", "f", ":", "self", ".", "owner_handle", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "else", ":", "self", ".", "ldebug", "(", "\"Couldn't find OWNER_HANDLE, unable to DM...\"", ")", "self", ".", "owner_handle", "=", "\"\"", "self", ".", "auth", "=", "tweepy", ".", "OAuthHandler", "(", "CONSUMER_KEY", ",", "CONSUMER_SECRET", ")", "self", ".", "auth", ".", "set_access_token", "(", "ACCESS_TOKEN", ",", "ACCESS_SECRET", ")", "self", ".", "api", "=", "tweepy", ".", "API", "(", "self", ".", "auth", ")" ]
Initialize what requires credentials/secret files. :param secrets_dir: dir to expect credentials in and store logs/history in. :param log: logger to use for log output. :param bot_name: name of this bot, used for various kinds of labelling. :returns: none.
[ "Initialize", "what", "requires", "credentials", "/", "secret", "files", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L23-L69
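Read from cred_init above, the secrets directory is expected to look roughly like this (sketch only; each file holds a single value that is read and stripped):

secrets_dir/
    CONSUMER_KEY      # required Twitter app credential
    CONSUMER_SECRET   # required
    ACCESS_TOKEN      # required
    ACCESS_SECRET     # required
    OWNER_HANDLE      # optional; without it the bot logs that it cannot DM the owner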
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.send
def send( self, *, text: str, ) -> List[OutputRecord]: """ Send birdsite message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error. """ try: status = self.api.update_status(text) self.ldebug(f"Status object from tweet: {status}.") return [TweetRecord(record_data={"tweet_id": status._json["id"], "text": text})] except tweepy.TweepError as e: return [self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"sending post {text} without media:\n{e}\n"), error=e)]
python
def send( self, *, text: str, ) -> List[OutputRecord]: """ Send birdsite message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error. """ try: status = self.api.update_status(text) self.ldebug(f"Status object from tweet: {status}.") return [TweetRecord(record_data={"tweet_id": status._json["id"], "text": text})] except tweepy.TweepError as e: return [self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"sending post {text} without media:\n{e}\n"), error=e)]
[ "def", "send", "(", "self", ",", "*", ",", "text", ":", "str", ",", ")", "->", "List", "[", "OutputRecord", "]", ":", "try", ":", "status", "=", "self", ".", "api", ".", "update_status", "(", "text", ")", "self", ".", "ldebug", "(", "f\"Status object from tweet: {status}.\"", ")", "return", "[", "TweetRecord", "(", "record_data", "=", "{", "\"tweet_id\"", ":", "status", ".", "_json", "[", "\"id\"", "]", ",", "\"text\"", ":", "text", "}", ")", "]", "except", "tweepy", ".", "TweepError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "message", "=", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"sending post {text} without media:\\n{e}\\n\"", ")", ",", "error", "=", "e", ")", "]" ]
Send birdsite message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Send", "birdsite", "message", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L71-L93
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.send_with_media
def send_with_media( self, *, text: str, files: List[str], captions: List[str]=[] ) -> List[OutputRecord]: """ Upload media to birdsite, and send status and media, and captions if present. :param text: tweet text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error. """ # upload media media_ids = None try: self.ldebug(f"Uploading files {files}.") media_ids = [self.api.media_upload(file).media_id_string for file in files] except tweepy.TweepError as e: return [self.handle_error( message=f"Bot {self.bot_name} encountered an error when uploading {files}:\n{e}\n", error=e)] # apply captions, if present self._handle_caption_upload(media_ids=media_ids, captions=captions) # send status try: status = self.api.update_status(status=text, media_ids=media_ids) self.ldebug(f"Status object from tweet: {status}.") return [TweetRecord(record_data={ "tweet_id": status._json["id"], "text": text, "media_ids": media_ids, "captions": captions, "files": files })] except tweepy.TweepError as e: return [self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"sending post {text} with media ids {media_ids}:\n{e}\n"), error=e)]
python
def send_with_media( self, *, text: str, files: List[str], captions: List[str]=[] ) -> List[OutputRecord]: """ Upload media to birdsite, and send status and media, and captions if present. :param text: tweet text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error. """ # upload media media_ids = None try: self.ldebug(f"Uploading files {files}.") media_ids = [self.api.media_upload(file).media_id_string for file in files] except tweepy.TweepError as e: return [self.handle_error( message=f"Bot {self.bot_name} encountered an error when uploading {files}:\n{e}\n", error=e)] # apply captions, if present self._handle_caption_upload(media_ids=media_ids, captions=captions) # send status try: status = self.api.update_status(status=text, media_ids=media_ids) self.ldebug(f"Status object from tweet: {status}.") return [TweetRecord(record_data={ "tweet_id": status._json["id"], "text": text, "media_ids": media_ids, "captions": captions, "files": files })] except tweepy.TweepError as e: return [self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"sending post {text} with media ids {media_ids}:\n{e}\n"), error=e)]
[ "def", "send_with_media", "(", "self", ",", "*", ",", "text", ":", "str", ",", "files", ":", "List", "[", "str", "]", ",", "captions", ":", "List", "[", "str", "]", "=", "[", "]", ")", "->", "List", "[", "OutputRecord", "]", ":", "# upload media", "media_ids", "=", "None", "try", ":", "self", ".", "ldebug", "(", "f\"Uploading files {files}.\"", ")", "media_ids", "=", "[", "self", ".", "api", ".", "media_upload", "(", "file", ")", ".", "media_id_string", "for", "file", "in", "files", "]", "except", "tweepy", ".", "TweepError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "message", "=", "f\"Bot {self.bot_name} encountered an error when uploading {files}:\\n{e}\\n\"", ",", "error", "=", "e", ")", "]", "# apply captions, if present", "self", ".", "_handle_caption_upload", "(", "media_ids", "=", "media_ids", ",", "captions", "=", "captions", ")", "# send status", "try", ":", "status", "=", "self", ".", "api", ".", "update_status", "(", "status", "=", "text", ",", "media_ids", "=", "media_ids", ")", "self", ".", "ldebug", "(", "f\"Status object from tweet: {status}.\"", ")", "return", "[", "TweetRecord", "(", "record_data", "=", "{", "\"tweet_id\"", ":", "status", ".", "_json", "[", "\"id\"", "]", ",", "\"text\"", ":", "text", ",", "\"media_ids\"", ":", "media_ids", ",", "\"captions\"", ":", "captions", ",", "\"files\"", ":", "files", "}", ")", "]", "except", "tweepy", ".", "TweepError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "message", "=", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"sending post {text} with media ids {media_ids}:\\n{e}\\n\"", ")", ",", "error", "=", "e", ")", "]" ]
Upload media to birdsite, and send status and media, and captions if present. :param text: tweet text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Upload", "media", "to", "birdsite", "and", "send", "status", "and", "media", "and", "captions", "if", "present", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L95-L144
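A sketch of posting with media through send_with_media. The file paths and captions are illustrative only; `bot` is again assumed to be a configured BirdsiteSkeleton.

# Assumed: `bot` is a configured BirdsiteSkeleton and the image files exist on disk.
records = bot.send_with_media(
    text="two pictures from today",
    files=["out/one.png", "out/two.png"],
    captions=["the first picture"],  # shorter than files; the rest get the default caption
)
print(records)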
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.perform_batch_reply
def perform_batch_reply( self, *, callback: Callable[..., str], lookback_limit: int, target_handle: str, ) -> List[OutputRecord]: """ Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error. """ self.log.info(f"Attempting to batch reply to birdsite user {target_handle}") if "@" in target_handle: base_target_handle = target_handle[1:] else: base_target_handle = target_handle records: List[OutputRecord] = [] statuses = self.api.user_timeline(screen_name=base_target_handle, count=lookback_limit) self.log.debug(f"Retrieved {len(statuses)} statuses.") for i, status in enumerate(statuses): self.log.debug(f"Processing status {i} of {len(statuses)}") status_id = status.id # find possible replies we've made. # the 10 * lookback_limit is a guess, # might not be enough and I'm not sure we can guarantee it is. our_statuses = self.api.user_timeline(since_id=status_id, count=lookback_limit * 10) in_reply_to_ids = list(map(lambda x: x.in_reply_to_status_id, our_statuses)) if status_id not in in_reply_to_ids: # the twitter API and tweepy will attempt to give us the truncated text of the # message if we don't do this roundabout thing. encoded_status_text = self.api.get_status(status_id, tweet_mode="extended")._json["full_text"] status_text = html.unescape(encoded_status_text) message = callback(message_id=status_id, message=status_text, extra_keys={}) full_message = f"@{base_target_handle} {message}" self.log.info(f"Trying to reply with {message} to status {status_id} " f"from {target_handle}.") try: new_status = self.api.update_status(status=full_message, in_reply_to_status_id=status_id) records.append(TweetRecord(record_data={ "tweet_id": new_status.id, "in_reply_to": f"@{base_target_handle}", "in_reply_to_id": status_id, "text": full_message, })) except tweepy.TweepError as e: records.append(self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"trying to reply to {status_id} with {message}:\n{e}\n"), error=e)) else: self.log.info(f"Not replying to status {status_id} from {target_handle} " f"- we already replied.") return records
python
def perform_batch_reply( self, *, callback: Callable[..., str], lookback_limit: int, target_handle: str, ) -> List[OutputRecord]: """ Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error. """ self.log.info(f"Attempting to batch reply to birdsite user {target_handle}") if "@" in target_handle: base_target_handle = target_handle[1:] else: base_target_handle = target_handle records: List[OutputRecord] = [] statuses = self.api.user_timeline(screen_name=base_target_handle, count=lookback_limit) self.log.debug(f"Retrieved {len(statuses)} statuses.") for i, status in enumerate(statuses): self.log.debug(f"Processing status {i} of {len(statuses)}") status_id = status.id # find possible replies we've made. # the 10 * lookback_limit is a guess, # might not be enough and I'm not sure we can guarantee it is. our_statuses = self.api.user_timeline(since_id=status_id, count=lookback_limit * 10) in_reply_to_ids = list(map(lambda x: x.in_reply_to_status_id, our_statuses)) if status_id not in in_reply_to_ids: # the twitter API and tweepy will attempt to give us the truncated text of the # message if we don't do this roundabout thing. encoded_status_text = self.api.get_status(status_id, tweet_mode="extended")._json["full_text"] status_text = html.unescape(encoded_status_text) message = callback(message_id=status_id, message=status_text, extra_keys={}) full_message = f"@{base_target_handle} {message}" self.log.info(f"Trying to reply with {message} to status {status_id} " f"from {target_handle}.") try: new_status = self.api.update_status(status=full_message, in_reply_to_status_id=status_id) records.append(TweetRecord(record_data={ "tweet_id": new_status.id, "in_reply_to": f"@{base_target_handle}", "in_reply_to_id": status_id, "text": full_message, })) except tweepy.TweepError as e: records.append(self.handle_error( message=(f"Bot {self.bot_name} encountered an error when " f"trying to reply to {status_id} with {message}:\n{e}\n"), error=e)) else: self.log.info(f"Not replying to status {status_id} from {target_handle} " f"- we already replied.") return records
[ "def", "perform_batch_reply", "(", "self", ",", "*", ",", "callback", ":", "Callable", "[", "...", ",", "str", "]", ",", "lookback_limit", ":", "int", ",", "target_handle", ":", "str", ",", ")", "->", "List", "[", "OutputRecord", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Attempting to batch reply to birdsite user {target_handle}\"", ")", "if", "\"@\"", "in", "target_handle", ":", "base_target_handle", "=", "target_handle", "[", "1", ":", "]", "else", ":", "base_target_handle", "=", "target_handle", "records", ":", "List", "[", "OutputRecord", "]", "=", "[", "]", "statuses", "=", "self", ".", "api", ".", "user_timeline", "(", "screen_name", "=", "base_target_handle", ",", "count", "=", "lookback_limit", ")", "self", ".", "log", ".", "debug", "(", "f\"Retrieved {len(statuses)} statuses.\"", ")", "for", "i", ",", "status", "in", "enumerate", "(", "statuses", ")", ":", "self", ".", "log", ".", "debug", "(", "f\"Processing status {i} of {len(statuses)}\"", ")", "status_id", "=", "status", ".", "id", "# find possible replies we've made.", "# the 10 * lookback_limit is a guess,", "# might not be enough and I'm not sure we can guarantee it is.", "our_statuses", "=", "self", ".", "api", ".", "user_timeline", "(", "since_id", "=", "status_id", ",", "count", "=", "lookback_limit", "*", "10", ")", "in_reply_to_ids", "=", "list", "(", "map", "(", "lambda", "x", ":", "x", ".", "in_reply_to_status_id", ",", "our_statuses", ")", ")", "if", "status_id", "not", "in", "in_reply_to_ids", ":", "# the twitter API and tweepy will attempt to give us the truncated text of the", "# message if we don't do this roundabout thing.", "encoded_status_text", "=", "self", ".", "api", ".", "get_status", "(", "status_id", ",", "tweet_mode", "=", "\"extended\"", ")", ".", "_json", "[", "\"full_text\"", "]", "status_text", "=", "html", ".", "unescape", "(", "encoded_status_text", ")", "message", "=", "callback", "(", "message_id", "=", "status_id", ",", "message", "=", "status_text", ",", "extra_keys", "=", "{", "}", ")", "full_message", "=", "f\"@{base_target_handle} {message}\"", "self", ".", "log", ".", "info", "(", "f\"Trying to reply with {message} to status {status_id} \"", "f\"from {target_handle}.\"", ")", "try", ":", "new_status", "=", "self", ".", "api", ".", "update_status", "(", "status", "=", "full_message", ",", "in_reply_to_status_id", "=", "status_id", ")", "records", ".", "append", "(", "TweetRecord", "(", "record_data", "=", "{", "\"tweet_id\"", ":", "new_status", ".", "id", ",", "\"in_reply_to\"", ":", "f\"@{base_target_handle}\"", ",", "\"in_reply_to_id\"", ":", "status_id", ",", "\"text\"", ":", "full_message", ",", "}", ")", ")", "except", "tweepy", ".", "TweepError", "as", "e", ":", "records", ".", "append", "(", "self", ".", "handle_error", "(", "message", "=", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"trying to reply to {status_id} with {message}:\\n{e}\\n\"", ")", ",", "error", "=", "e", ")", ")", "else", ":", "self", ".", "log", ".", "info", "(", "f\"Not replying to status {status_id} from {target_handle} \"", "f\"- we already replied.\"", ")", "return", "records" ]
Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Performs", "batch", "reply", "on", "target", "account", ".", "Looks", "up", "the", "recent", "messages", "of", "the", "target", "user", "applies", "the", "callback", "and", "replies", "with", "what", "the", "callback", "generates", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L146-L223
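A sketch of driving perform_batch_reply with a callback. The keyword signature of the callback (message_id, message, extra_keys) follows the call made inside the method; the target handle and reply text are placeholders.

def shout_back(*, message_id, message, extra_keys):
    # Build the reply text from the original status text.
    return message.upper()[:200]

# Assumed: `bot` is a configured BirdsiteSkeleton.
records = bot.perform_batch_reply(
    callback=shout_back,
    lookback_limit=20,
    target_handle="@example_account",
)
print(f"created {len(records)} reply records")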
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.send_dm_sos
def send_dm_sos(self, message: str) -> None: """ Send DM to owner if something happens. :param message: message to send to owner. :returns: None. """ if self.owner_handle: try: # twitter changed the DM API and tweepy (as of 2019-03-08) # has not adapted. # fixing with # https://github.com/tweepy/tweepy/issues/1081#issuecomment-423486837 owner_id = self.api.get_user(screen_name=self.owner_handle).id event = { "event": { "type": "message_create", "message_create": { "target": { "recipient_id": f"{owner_id}", }, "message_data": { "text": message } } } } self._send_direct_message_new(event) except tweepy.TweepError as de: self.lerror(f"Error trying to send DM about error!: {de}") else: self.lerror("Can't send DM SOS, no owner handle.")
python
def send_dm_sos(self, message: str) -> None: """ Send DM to owner if something happens. :param message: message to send to owner. :returns: None. """ if self.owner_handle: try: # twitter changed the DM API and tweepy (as of 2019-03-08) # has not adapted. # fixing with # https://github.com/tweepy/tweepy/issues/1081#issuecomment-423486837 owner_id = self.api.get_user(screen_name=self.owner_handle).id event = { "event": { "type": "message_create", "message_create": { "target": { "recipient_id": f"{owner_id}", }, "message_data": { "text": message } } } } self._send_direct_message_new(event) except tweepy.TweepError as de: self.lerror(f"Error trying to send DM about error!: {de}") else: self.lerror("Can't send DM SOS, no owner handle.")
[ "def", "send_dm_sos", "(", "self", ",", "message", ":", "str", ")", "->", "None", ":", "if", "self", ".", "owner_handle", ":", "try", ":", "# twitter changed the DM API and tweepy (as of 2019-03-08)", "# has not adapted.", "# fixing with", "# https://github.com/tweepy/tweepy/issues/1081#issuecomment-423486837", "owner_id", "=", "self", ".", "api", ".", "get_user", "(", "screen_name", "=", "self", ".", "owner_handle", ")", ".", "id", "event", "=", "{", "\"event\"", ":", "{", "\"type\"", ":", "\"message_create\"", ",", "\"message_create\"", ":", "{", "\"target\"", ":", "{", "\"recipient_id\"", ":", "f\"{owner_id}\"", ",", "}", ",", "\"message_data\"", ":", "{", "\"text\"", ":", "message", "}", "}", "}", "}", "self", ".", "_send_direct_message_new", "(", "event", ")", "except", "tweepy", ".", "TweepError", "as", "de", ":", "self", ".", "lerror", "(", "f\"Error trying to send DM about error!: {de}\"", ")", "else", ":", "self", ".", "lerror", "(", "\"Can't send DM SOS, no owner handle.\"", ")" ]
Send DM to owner if something happens. :param message: message to send to owner. :returns: None.
[ "Send", "DM", "to", "owner", "if", "something", "happens", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L226-L260
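send_dm_sos is normally reached from handle_error, but it can also be exercised directly; it logs an error instead of sending when owner_handle is unset.

# Assumed: `bot` is a configured BirdsiteSkeleton whose owner_handle is set.
bot.send_dm_sos("heads up: manual test of the DM error channel")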
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton.handle_error
def handle_error( self, *, message: str, error: tweepy.TweepError, ) -> OutputRecord: """ Handle error while trying to do something. :param message: message to send in DM regarding error. :param e: tweepy error object. :returns: OutputRecord containing an error. """ self.lerror(f"Got an error! {error}") # Handle errors if we know how. try: code = error[0]["code"] if code in self.handled_errors: self.handled_errors[code] else: self.send_dm_sos(message) except Exception: self.send_dm_sos(message) return TweetRecord(error=error)
python
def handle_error( self, *, message: str, error: tweepy.TweepError, ) -> OutputRecord: """ Handle error while trying to do something. :param message: message to send in DM regarding error. :param e: tweepy error object. :returns: OutputRecord containing an error. """ self.lerror(f"Got an error! {error}") # Handle errors if we know how. try: code = error[0]["code"] if code in self.handled_errors: self.handled_errors[code] else: self.send_dm_sos(message) except Exception: self.send_dm_sos(message) return TweetRecord(error=error)
[ "def", "handle_error", "(", "self", ",", "*", ",", "message", ":", "str", ",", "error", ":", "tweepy", ".", "TweepError", ",", ")", "->", "OutputRecord", ":", "self", ".", "lerror", "(", "f\"Got an error! {error}\"", ")", "# Handle errors if we know how.", "try", ":", "code", "=", "error", "[", "0", "]", "[", "\"code\"", "]", "if", "code", "in", "self", ".", "handled_errors", ":", "self", ".", "handled_errors", "[", "code", "]", "else", ":", "self", ".", "send_dm_sos", "(", "message", ")", "except", "Exception", ":", "self", ".", "send_dm_sos", "(", "message", ")", "return", "TweetRecord", "(", "error", "=", "error", ")" ]
Handle error while trying to do something. :param message: message to send in DM regarding error. :param e: tweepy error object. :returns: OutputRecord containing an error.
[ "Handle", "error", "while", "trying", "to", "do", "something", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L262-L288
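handle_error is what both send methods fall back to; a sketch of calling it by hand around a raw tweepy call, assuming the same configured `bot`.

import tweepy

try:
    bot.api.update_status("x" * 500)  # deliberately over-long status, expected to fail
except tweepy.TweepError as e:
    record = bot.handle_error(message="manual update_status call failed", error=e)
    print(record)  # a TweetRecord carrying the error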
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton._handle_caption_upload
def _handle_caption_upload( self, *, media_ids: List[str], captions: Optional[List[str]], ) -> None: """ Handle uploading all captions. :param media_ids: media ids of uploads to attach captions to. :param captions: captions to be attached to those media ids. :returns: None. """ if captions is None: captions = [] if len(media_ids) > len(captions): captions.extend([self.default_caption_message] * (len(media_ids) - len(captions))) for i, media_id in enumerate(media_ids): caption = captions[i] self._upload_caption(media_id=media_id, caption=caption)
python
def _handle_caption_upload( self, *, media_ids: List[str], captions: Optional[List[str]], ) -> None: """ Handle uploading all captions. :param media_ids: media ids of uploads to attach captions to. :param captions: captions to be attached to those media ids. :returns: None. """ if captions is None: captions = [] if len(media_ids) > len(captions): captions.extend([self.default_caption_message] * (len(media_ids) - len(captions))) for i, media_id in enumerate(media_ids): caption = captions[i] self._upload_caption(media_id=media_id, caption=caption)
[ "def", "_handle_caption_upload", "(", "self", ",", "*", ",", "media_ids", ":", "List", "[", "str", "]", ",", "captions", ":", "Optional", "[", "List", "[", "str", "]", "]", ",", ")", "->", "None", ":", "if", "captions", "is", "None", ":", "captions", "=", "[", "]", "if", "len", "(", "media_ids", ")", ">", "len", "(", "captions", ")", ":", "captions", ".", "extend", "(", "[", "self", ".", "default_caption_message", "]", "*", "(", "len", "(", "media_ids", ")", "-", "len", "(", "captions", ")", ")", ")", "for", "i", ",", "media_id", "in", "enumerate", "(", "media_ids", ")", ":", "caption", "=", "captions", "[", "i", "]", "self", ".", "_upload_caption", "(", "media_id", "=", "media_id", ",", "caption", "=", "caption", ")" ]
Handle uploading all captions. :param media_ids: media ids of uploads to attach captions to. :param captions: captions to be attached to those media ids. :returns: None.
[ "Handle", "uploading", "all", "captions", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L298-L319
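The caption-padding rule in _handle_caption_upload can be shown standalone; this runnable snippet mirrors the same list logic without touching the API.

media_ids = ["m1", "m2", "m3"]
captions = ["a bird"]
default_caption_message = "image"  # stands in for self.default_caption_message

if captions is None:
    captions = []
if len(media_ids) > len(captions):
    captions.extend([default_caption_message] * (len(media_ids) - len(captions)))

print(list(zip(media_ids, captions)))
# [('m1', 'a bird'), ('m2', 'image'), ('m3', 'image')]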
alixnovosi/botskeleton
botskeleton/outputs/output_birdsite.py
BirdsiteSkeleton._send_direct_message_new
def _send_direct_message_new(self, messageobject: Dict[str, Dict]) -> Any: """ :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/new-event.html """ headers, post_data = _buildmessageobject(messageobject) newdm_path = "/direct_messages/events/new.json" return tweepy.binder.bind_api( api=self.api, path=newdm_path, method="POST", require_auth=True, )(post_data=post_data, headers=headers)
python
def _send_direct_message_new(self, messageobject: Dict[str, Dict]) -> Any: """ :reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/new-event.html """ headers, post_data = _buildmessageobject(messageobject) newdm_path = "/direct_messages/events/new.json" return tweepy.binder.bind_api( api=self.api, path=newdm_path, method="POST", require_auth=True, )(post_data=post_data, headers=headers)
[ "def", "_send_direct_message_new", "(", "self", ",", "messageobject", ":", "Dict", "[", "str", ",", "Dict", "]", ")", "->", "Any", ":", "headers", ",", "post_data", "=", "_buildmessageobject", "(", "messageobject", ")", "newdm_path", "=", "\"/direct_messages/events/new.json\"", "return", "tweepy", ".", "binder", ".", "bind_api", "(", "api", "=", "self", ".", "api", ",", "path", "=", "newdm_path", ",", "method", "=", "\"POST\"", ",", "require_auth", "=", "True", ",", ")", "(", "post_data", "=", "post_data", ",", "headers", "=", "headers", ")" ]
:reference: https://developer.twitter.com/en/docs/direct-messages/sending-and-receiving/api-reference/new-event.html
[ ":", "reference", ":", "https", ":", "//", "developer", ".", "twitter", ".", "com", "/", "en", "/", "docs", "/", "direct", "-", "messages", "/", "sending", "-", "and", "-", "receiving", "/", "api", "-", "reference", "/", "new", "-", "event", ".", "html" ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_birdsite.py#L343-L355
willkg/socorro-siggen
siggen/cmd_signify.py
main
def main(): """Takes crash data via stdin and generates a Socorro signature""" parser = argparse.ArgumentParser(description=DESCRIPTION) parser.add_argument( '-v', '--verbose', help='increase output verbosity', action='store_true' ) args = parser.parse_args() generator = SignatureGenerator(debug=args.verbose) crash_data = json.loads(sys.stdin.read()) ret = generator.generate(crash_data) print(json.dumps(ret, indent=2))
python
def main(): """Takes crash data via stdin and generates a Socorro signature""" parser = argparse.ArgumentParser(description=DESCRIPTION) parser.add_argument( '-v', '--verbose', help='increase output verbosity', action='store_true' ) args = parser.parse_args() generator = SignatureGenerator(debug=args.verbose) crash_data = json.loads(sys.stdin.read()) ret = generator.generate(crash_data) print(json.dumps(ret, indent=2))
[ "def", "main", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", "description", "=", "DESCRIPTION", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--verbose'", ",", "help", "=", "'increase output verbosity'", ",", "action", "=", "'store_true'", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "generator", "=", "SignatureGenerator", "(", "debug", "=", "args", ".", "verbose", ")", "crash_data", "=", "json", ".", "loads", "(", "sys", ".", "stdin", ".", "read", "(", ")", ")", "ret", "=", "generator", ".", "generate", "(", "crash_data", ")", "print", "(", "json", ".", "dumps", "(", "ret", ",", "indent", "=", "2", ")", ")" ]
Takes crash data via stdin and generates a Socorro signature
[ "Takes", "crash", "data", "via", "stdin", "and", "generates", "a", "Socorro", "signature" ]
train
https://github.com/willkg/socorro-siggen/blob/db7e3233e665a458a961c48da22e93a69b1d08d6/siggen/cmd_signify.py#L19-L33
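main reads crash data as JSON on stdin and prints the generated signature; the same flow can be sketched programmatically. The import location and the shape of crash_data below are assumptions made for illustration, not taken from this record.

import json
from siggen import SignatureGenerator  # assumed import location

crash_data = {"json_dump": {"threads": []}}  # purely illustrative input
generator = SignatureGenerator(debug=True)
print(json.dumps(generator.generate(crash_data), indent=2))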
20c/facsimile
facsimile/base.py
Base.add_config
def add_config(self, config): """ Update internel configuration dict with config and recheck """ for attr in self.__fixed_attrs: if attr in config: raise Exception("cannot set '%s' outside of init", attr) # pre checkout stages = config.get('stages', None) if stages: self.stages = stages # maybe pre checkout # validate options self.__dry_run = config.get('dry_run', False) self.system = str.lower(platform.system()) self.__start = config.get('start', None) self.__end = config.get('end', None) self.__only = config.get('only', None) self.__build_docs = config.get('build_docs', False) self.__chatty = config.get('chatty', False) self.__clean = config.get('clean', False) self.__devel = config.get('devel', False) self.__debug = config.get('debug', False) self.__skip_libcheck = config.get('skip_libcheck', False) self.__debuginfo = config.get('debuginfo', False) self.__release = config.get('release', False) self.__skip_unit = config.get('skip_unit', False) self.__static = config.get('static', False) self.__make_dash_j = int(config.get('j', 0)) self.__target_only = config.get('target_only', None) bits = config.get('bits', None) if bits: self.bits = int(bits) else: self.bits = self.sys_bits self.compiler = config.get('compiler', None) self.test_config = config.get('test_config', '-') if not self.test_config: self.test_config = '-' self.use_ccache = config.get('use_ccache', False) self.tmpl_engine = config.get('tmpl_engine', 'jinja2') self.__write_codec = config.get('write_codec', None) self.__codec = None # TODO move out of init if not config.get('skip_env_check', False): if "LD_LIBRARY_PATH" in os.environ: raise Exception("environment variable LD_LIBRARY_PATH is set") self.check_config()
python
def add_config(self, config): """ Update internel configuration dict with config and recheck """ for attr in self.__fixed_attrs: if attr in config: raise Exception("cannot set '%s' outside of init", attr) # pre checkout stages = config.get('stages', None) if stages: self.stages = stages # maybe pre checkout # validate options self.__dry_run = config.get('dry_run', False) self.system = str.lower(platform.system()) self.__start = config.get('start', None) self.__end = config.get('end', None) self.__only = config.get('only', None) self.__build_docs = config.get('build_docs', False) self.__chatty = config.get('chatty', False) self.__clean = config.get('clean', False) self.__devel = config.get('devel', False) self.__debug = config.get('debug', False) self.__skip_libcheck = config.get('skip_libcheck', False) self.__debuginfo = config.get('debuginfo', False) self.__release = config.get('release', False) self.__skip_unit = config.get('skip_unit', False) self.__static = config.get('static', False) self.__make_dash_j = int(config.get('j', 0)) self.__target_only = config.get('target_only', None) bits = config.get('bits', None) if bits: self.bits = int(bits) else: self.bits = self.sys_bits self.compiler = config.get('compiler', None) self.test_config = config.get('test_config', '-') if not self.test_config: self.test_config = '-' self.use_ccache = config.get('use_ccache', False) self.tmpl_engine = config.get('tmpl_engine', 'jinja2') self.__write_codec = config.get('write_codec', None) self.__codec = None # TODO move out of init if not config.get('skip_env_check', False): if "LD_LIBRARY_PATH" in os.environ: raise Exception("environment variable LD_LIBRARY_PATH is set") self.check_config()
[ "def", "add_config", "(", "self", ",", "config", ")", ":", "for", "attr", "in", "self", ".", "__fixed_attrs", ":", "if", "attr", "in", "config", ":", "raise", "Exception", "(", "\"cannot set '%s' outside of init\"", ",", "attr", ")", "# pre checkout", "stages", "=", "config", ".", "get", "(", "'stages'", ",", "None", ")", "if", "stages", ":", "self", ".", "stages", "=", "stages", "# maybe pre checkout", "# validate options", "self", ".", "__dry_run", "=", "config", ".", "get", "(", "'dry_run'", ",", "False", ")", "self", ".", "system", "=", "str", ".", "lower", "(", "platform", ".", "system", "(", ")", ")", "self", ".", "__start", "=", "config", ".", "get", "(", "'start'", ",", "None", ")", "self", ".", "__end", "=", "config", ".", "get", "(", "'end'", ",", "None", ")", "self", ".", "__only", "=", "config", ".", "get", "(", "'only'", ",", "None", ")", "self", ".", "__build_docs", "=", "config", ".", "get", "(", "'build_docs'", ",", "False", ")", "self", ".", "__chatty", "=", "config", ".", "get", "(", "'chatty'", ",", "False", ")", "self", ".", "__clean", "=", "config", ".", "get", "(", "'clean'", ",", "False", ")", "self", ".", "__devel", "=", "config", ".", "get", "(", "'devel'", ",", "False", ")", "self", ".", "__debug", "=", "config", ".", "get", "(", "'debug'", ",", "False", ")", "self", ".", "__skip_libcheck", "=", "config", ".", "get", "(", "'skip_libcheck'", ",", "False", ")", "self", ".", "__debuginfo", "=", "config", ".", "get", "(", "'debuginfo'", ",", "False", ")", "self", ".", "__release", "=", "config", ".", "get", "(", "'release'", ",", "False", ")", "self", ".", "__skip_unit", "=", "config", ".", "get", "(", "'skip_unit'", ",", "False", ")", "self", ".", "__static", "=", "config", ".", "get", "(", "'static'", ",", "False", ")", "self", ".", "__make_dash_j", "=", "int", "(", "config", ".", "get", "(", "'j'", ",", "0", ")", ")", "self", ".", "__target_only", "=", "config", ".", "get", "(", "'target_only'", ",", "None", ")", "bits", "=", "config", ".", "get", "(", "'bits'", ",", "None", ")", "if", "bits", ":", "self", ".", "bits", "=", "int", "(", "bits", ")", "else", ":", "self", ".", "bits", "=", "self", ".", "sys_bits", "self", ".", "compiler", "=", "config", ".", "get", "(", "'compiler'", ",", "None", ")", "self", ".", "test_config", "=", "config", ".", "get", "(", "'test_config'", ",", "'-'", ")", "if", "not", "self", ".", "test_config", ":", "self", ".", "test_config", "=", "'-'", "self", ".", "use_ccache", "=", "config", ".", "get", "(", "'use_ccache'", ",", "False", ")", "self", ".", "tmpl_engine", "=", "config", ".", "get", "(", "'tmpl_engine'", ",", "'jinja2'", ")", "self", ".", "__write_codec", "=", "config", ".", "get", "(", "'write_codec'", ",", "None", ")", "self", ".", "__codec", "=", "None", "# TODO move out of init", "if", "not", "config", ".", "get", "(", "'skip_env_check'", ",", "False", ")", ":", "if", "\"LD_LIBRARY_PATH\"", "in", "os", ".", "environ", ":", "raise", "Exception", "(", "\"environment variable LD_LIBRARY_PATH is set\"", ")", "self", ".", "check_config", "(", ")" ]
Update internal configuration dict with config and recheck
[ "Update", "internel", "configuration", "dict", "with", "config", "and", "recheck" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L234-L292
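A sketch of the kind of dict add_config consumes, using only keys that the method itself reads. How the Base instance is obtained is outside this record, so the call is shown against an assumed `builder` object.

config = {
    "dry_run": True,
    "devel": True,
    "bits": 64,
    "j": 4,
    "tmpl_engine": "jinja2",
    "skip_env_check": True,
}
# Assumed: `builder` is an instance of a Base subclass that defines `stages`.
builder.add_config(config)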
20c/facsimile
facsimile/base.py
Base.check_config
def check_config(self): """ called after config was modified to sanity check raises on error """ # sanity checks - no config access past here if not getattr(self, 'stages', None): raise NotImplementedError("member variable 'stages' must be defined") # start at stage if self.__start: self.__stage_start = self.find_stage(self.__start) else: self.__stage_start = 0 # end at stage if self.__end: self.__stage_end = self.find_stage(self.__end) + 1 self.opt_end = self.__end else: self.__stage_end = len(self.stages) # only stage if self.__only: if self.__start or self.__end: raise Exception( "stage option 'only' cannot be used with start or end") self.__stage_start = self.find_stage(self.__only) self.__stage_end = self.__stage_start + 1 if self.__devel: self.__devel = True # force deploy skip if self.__stage_end >= len(self.stages): self.status_msg("removing deploy stage for development build") # XXX self.__stage_end = self.__stage_end - 1 if self.stage_start >= self.stage_end: raise Exception("start and end produce no stages") if self.bits not in [32, 64]: raise Exception( "can't do a %d bit build: unknown build process" % self.bits) if self.bits == 64 and not self.is_64b: raise Exception( "this machine is not 64 bit, cannot perform 64 bit build") if self.system == 'windows': self.compilertag = 'vc10' elif self.system == 'linux': self.compilertag = 'gcc44' else: raise RuntimeError("can't decide compilertag on " + self.system) self.build_suffix = '' if not self.is_unixy: if self.__static: runtime = 'MT' else: runtime = 'MD' if self.__release: self.configuration_name = 'Release' else: runtime += 'd' self.configuration_name = 'Debug' self.build_suffix = '-' + runtime self.runtime = runtime else: self.configuration_name = 'CFNAME_INVALID_ON_LINUX' self.runtime = 'RUNTIME_INVALID_ON_LINUX' if self.test_config != '-': self.test_config = os.path.abspath(self.test_config) # split version if self.version: ver = self.version.split('.') self.version_major = int(ver[0]) self.version_minor = int(ver[1]) self.version_patch = int(ver[2]) if(len(ver) == 4): self.version_build = int(ver[3])
python
def check_config(self): """ called after config was modified to sanity check raises on error """ # sanity checks - no config access past here if not getattr(self, 'stages', None): raise NotImplementedError("member variable 'stages' must be defined") # start at stage if self.__start: self.__stage_start = self.find_stage(self.__start) else: self.__stage_start = 0 # end at stage if self.__end: self.__stage_end = self.find_stage(self.__end) + 1 self.opt_end = self.__end else: self.__stage_end = len(self.stages) # only stage if self.__only: if self.__start or self.__end: raise Exception( "stage option 'only' cannot be used with start or end") self.__stage_start = self.find_stage(self.__only) self.__stage_end = self.__stage_start + 1 if self.__devel: self.__devel = True # force deploy skip if self.__stage_end >= len(self.stages): self.status_msg("removing deploy stage for development build") # XXX self.__stage_end = self.__stage_end - 1 if self.stage_start >= self.stage_end: raise Exception("start and end produce no stages") if self.bits not in [32, 64]: raise Exception( "can't do a %d bit build: unknown build process" % self.bits) if self.bits == 64 and not self.is_64b: raise Exception( "this machine is not 64 bit, cannot perform 64 bit build") if self.system == 'windows': self.compilertag = 'vc10' elif self.system == 'linux': self.compilertag = 'gcc44' else: raise RuntimeError("can't decide compilertag on " + self.system) self.build_suffix = '' if not self.is_unixy: if self.__static: runtime = 'MT' else: runtime = 'MD' if self.__release: self.configuration_name = 'Release' else: runtime += 'd' self.configuration_name = 'Debug' self.build_suffix = '-' + runtime self.runtime = runtime else: self.configuration_name = 'CFNAME_INVALID_ON_LINUX' self.runtime = 'RUNTIME_INVALID_ON_LINUX' if self.test_config != '-': self.test_config = os.path.abspath(self.test_config) # split version if self.version: ver = self.version.split('.') self.version_major = int(ver[0]) self.version_minor = int(ver[1]) self.version_patch = int(ver[2]) if(len(ver) == 4): self.version_build = int(ver[3])
[ "def", "check_config", "(", "self", ")", ":", "# sanity checks - no config access past here", "if", "not", "getattr", "(", "self", ",", "'stages'", ",", "None", ")", ":", "raise", "NotImplementedError", "(", "\"member variable 'stages' must be defined\"", ")", "# start at stage", "if", "self", ".", "__start", ":", "self", ".", "__stage_start", "=", "self", ".", "find_stage", "(", "self", ".", "__start", ")", "else", ":", "self", ".", "__stage_start", "=", "0", "# end at stage", "if", "self", ".", "__end", ":", "self", ".", "__stage_end", "=", "self", ".", "find_stage", "(", "self", ".", "__end", ")", "+", "1", "self", ".", "opt_end", "=", "self", ".", "__end", "else", ":", "self", ".", "__stage_end", "=", "len", "(", "self", ".", "stages", ")", "# only stage", "if", "self", ".", "__only", ":", "if", "self", ".", "__start", "or", "self", ".", "__end", ":", "raise", "Exception", "(", "\"stage option 'only' cannot be used with start or end\"", ")", "self", ".", "__stage_start", "=", "self", ".", "find_stage", "(", "self", ".", "__only", ")", "self", ".", "__stage_end", "=", "self", ".", "__stage_start", "+", "1", "if", "self", ".", "__devel", ":", "self", ".", "__devel", "=", "True", "# force deploy skip", "if", "self", ".", "__stage_end", ">=", "len", "(", "self", ".", "stages", ")", ":", "self", ".", "status_msg", "(", "\"removing deploy stage for development build\"", ")", "# XXX self.__stage_end = self.__stage_end - 1", "if", "self", ".", "stage_start", ">=", "self", ".", "stage_end", ":", "raise", "Exception", "(", "\"start and end produce no stages\"", ")", "if", "self", ".", "bits", "not", "in", "[", "32", ",", "64", "]", ":", "raise", "Exception", "(", "\"can't do a %d bit build: unknown build process\"", "%", "self", ".", "bits", ")", "if", "self", ".", "bits", "==", "64", "and", "not", "self", ".", "is_64b", ":", "raise", "Exception", "(", "\"this machine is not 64 bit, cannot perform 64 bit build\"", ")", "if", "self", ".", "system", "==", "'windows'", ":", "self", ".", "compilertag", "=", "'vc10'", "elif", "self", ".", "system", "==", "'linux'", ":", "self", ".", "compilertag", "=", "'gcc44'", "else", ":", "raise", "RuntimeError", "(", "\"can't decide compilertag on \"", "+", "self", ".", "system", ")", "self", ".", "build_suffix", "=", "''", "if", "not", "self", ".", "is_unixy", ":", "if", "self", ".", "__static", ":", "runtime", "=", "'MT'", "else", ":", "runtime", "=", "'MD'", "if", "self", ".", "__release", ":", "self", ".", "configuration_name", "=", "'Release'", "else", ":", "runtime", "+=", "'d'", "self", ".", "configuration_name", "=", "'Debug'", "self", ".", "build_suffix", "=", "'-'", "+", "runtime", "self", ".", "runtime", "=", "runtime", "else", ":", "self", ".", "configuration_name", "=", "'CFNAME_INVALID_ON_LINUX'", "self", ".", "runtime", "=", "'RUNTIME_INVALID_ON_LINUX'", "if", "self", ".", "test_config", "!=", "'-'", ":", "self", ".", "test_config", "=", "os", ".", "path", ".", "abspath", "(", "self", ".", "test_config", ")", "# split version", "if", "self", ".", "version", ":", "ver", "=", "self", ".", "version", ".", "split", "(", "'.'", ")", "self", ".", "version_major", "=", "int", "(", "ver", "[", "0", "]", ")", "self", ".", "version_minor", "=", "int", "(", "ver", "[", "1", "]", ")", "self", ".", "version_patch", "=", "int", "(", "ver", "[", "2", "]", ")", "if", "(", "len", "(", "ver", ")", "==", "4", ")", ":", "self", ".", "version_build", "=", "int", "(", "ver", "[", "3", "]", ")" ]
called after config was modified to sanity check raises on error
[ "called", "after", "config", "was", "modified", "to", "sanity", "check", "raises", "on", "error" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L294-L376
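The version handling at the end of check_config splits a dotted version string into numeric parts; the same parsing shown standalone:

version = "2.5.1.7890"
ver = version.split(".")
version_major, version_minor, version_patch = int(ver[0]), int(ver[1]), int(ver[2])
version_build = int(ver[3]) if len(ver) == 4 else None
print(version_major, version_minor, version_patch, version_build)  # 2 5 1 7890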
20c/facsimile
facsimile/base.py
Base.check_definition
def check_definition(self): """ called after Defintion was loaded to sanity check raises on error """ if not self.write_codec: self.__write_codec = self.defined.data_ext # TODO need to add back a class scope target limited for subprojects with sub target sets targets = self.get_defined_targets() if self.__target_only: if self.__target_only not in targets: raise RuntimeError("invalid target '%s'" % self.__target_only) self.targets = [self.__target_only] else: self.targets = targets
python
def check_definition(self): """ called after Defintion was loaded to sanity check raises on error """ if not self.write_codec: self.__write_codec = self.defined.data_ext # TODO need to add back a class scope target limited for subprojects with sub target sets targets = self.get_defined_targets() if self.__target_only: if self.__target_only not in targets: raise RuntimeError("invalid target '%s'" % self.__target_only) self.targets = [self.__target_only] else: self.targets = targets
[ "def", "check_definition", "(", "self", ")", ":", "if", "not", "self", ".", "write_codec", ":", "self", ".", "__write_codec", "=", "self", ".", "defined", ".", "data_ext", "# TODO need to add back a class scope target limited for subprojects with sub target sets", "targets", "=", "self", ".", "get_defined_targets", "(", ")", "if", "self", ".", "__target_only", ":", "if", "self", ".", "__target_only", "not", "in", "targets", ":", "raise", "RuntimeError", "(", "\"invalid target '%s'\"", "%", "self", ".", "__target_only", ")", "self", ".", "targets", "=", "[", "self", ".", "__target_only", "]", "else", ":", "self", ".", "targets", "=", "targets" ]
called after Definition was loaded to sanity check raises on error
[ "called", "after", "Defintion", "was", "loaded", "to", "sanity", "check", "raises", "on", "error" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L378-L394
20c/facsimile
facsimile/base.py
Base.find_datafile
def find_datafile(self, name, search_path=None): """ find all matching data files in search_path returns array of tuples (codec_object, filename) """ if not search_path: search_path = self.define_dir return codec.find_datafile(name, search_path)
python
def find_datafile(self, name, search_path=None): """ find all matching data files in search_path returns array of tuples (codec_object, filename) """ if not search_path: search_path = self.define_dir return codec.find_datafile(name, search_path)
[ "def", "find_datafile", "(", "self", ",", "name", ",", "search_path", "=", "None", ")", ":", "if", "not", "search_path", ":", "search_path", "=", "self", ".", "define_dir", "return", "codec", ".", "find_datafile", "(", "name", ",", "search_path", ")" ]
find all matching data files in search_path returns array of tuples (codec_object, filename)
[ "find", "all", "matching", "data", "files", "in", "search_path", "returns", "array", "of", "tuples", "(", "codec_object", "filename", ")" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L457-L465
20c/facsimile
facsimile/base.py
Base.load_datafile
def load_datafile(self, name, search_path=None, **kwargs): """ find datafile and load them from codec """ if not search_path: search_path = self.define_dir self.debug_msg('loading datafile %s from %s' % (name, str(search_path))) return codec.load_datafile(name, search_path, **kwargs)
python
def load_datafile(self, name, search_path=None, **kwargs): """ find datafile and load them from codec """ if not search_path: search_path = self.define_dir self.debug_msg('loading datafile %s from %s' % (name, str(search_path))) return codec.load_datafile(name, search_path, **kwargs)
[ "def", "load_datafile", "(", "self", ",", "name", ",", "search_path", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "not", "search_path", ":", "search_path", "=", "self", ".", "define_dir", "self", ".", "debug_msg", "(", "'loading datafile %s from %s'", "%", "(", "name", ",", "str", "(", "search_path", ")", ")", ")", "return", "codec", ".", "load_datafile", "(", "name", ",", "search_path", ",", "*", "*", "kwargs", ")" ]
find datafile and load them from codec
[ "find", "datafile", "and", "load", "them", "from", "codec" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L467-L475
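find_datafile and load_datafile (this record and the previous one) both default their search path to define_dir and delegate to the codec module; a sketch of calling them, with the data file name and the `builder` instance assumed.

# Assumed: `builder` is a configured Base subclass instance.
for codec_obj, filename in builder.find_datafile("config", search_path="."):
    print(filename)

data = builder.load_datafile("config", search_path=".")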
20c/facsimile
facsimile/base.py
Base.run
def run(self): """ run all configured stages """ self.sanity_check() # TODO - check for devel # if not self.version: # raise Exception("no version") # XXX check attr exist if not self.release_environment: raise Exception("no instance name") time_start = time.time() cwd = os.getcwd() who = getpass.getuser() self._make_outdirs() append_notices = "" if hasattr(self, 'opt_end'): append_notices = ". shortened push, only to %s stage" % self.opt_end if self.is_devel: append_notices += ". devel build" if hasattr(self, 'append_notices'): append_notices += self.append_notices line = "%s %s %s by %s%s" % ( sys.argv[0], self.version, self.release_environment, who, append_notices) b = 'deploy begin %s' % line e = 'deploy done %s' % line if self.chatty: self.alact(b) ok = False stage_passed = None try: for stage in self.stages[self.stage_start:self.stage_end]: self.debug_msg("stage %s starting" % (stage,)) getattr(self, stage)() self.chdir(cwd) stage_passed = stage self.debug_msg("stage %s complete" % (stage,)) ok = True finally: if not ok: if self.chatty: if not stage_passed: self.alact( 'deploy failed %s. completed no stages' % line) else: self.alact('deploy failed %s. completed %s' % (line, stage_passed)) self.status_msg('[OK]') if self.chatty: self.alact('%s in %0.3f sec' % (e, time.time() - time_start)) return 0
python
def run(self): """ run all configured stages """ self.sanity_check() # TODO - check for devel # if not self.version: # raise Exception("no version") # XXX check attr exist if not self.release_environment: raise Exception("no instance name") time_start = time.time() cwd = os.getcwd() who = getpass.getuser() self._make_outdirs() append_notices = "" if hasattr(self, 'opt_end'): append_notices = ". shortened push, only to %s stage" % self.opt_end if self.is_devel: append_notices += ". devel build" if hasattr(self, 'append_notices'): append_notices += self.append_notices line = "%s %s %s by %s%s" % ( sys.argv[0], self.version, self.release_environment, who, append_notices) b = 'deploy begin %s' % line e = 'deploy done %s' % line if self.chatty: self.alact(b) ok = False stage_passed = None try: for stage in self.stages[self.stage_start:self.stage_end]: self.debug_msg("stage %s starting" % (stage,)) getattr(self, stage)() self.chdir(cwd) stage_passed = stage self.debug_msg("stage %s complete" % (stage,)) ok = True finally: if not ok: if self.chatty: if not stage_passed: self.alact( 'deploy failed %s. completed no stages' % line) else: self.alact('deploy failed %s. completed %s' % (line, stage_passed)) self.status_msg('[OK]') if self.chatty: self.alact('%s in %0.3f sec' % (e, time.time() - time_start)) return 0
[ "def", "run", "(", "self", ")", ":", "self", ".", "sanity_check", "(", ")", "# TODO - check for devel", "# if not self.version:", "# raise Exception(\"no version\")", "# XXX check attr exist", "if", "not", "self", ".", "release_environment", ":", "raise", "Exception", "(", "\"no instance name\"", ")", "time_start", "=", "time", ".", "time", "(", ")", "cwd", "=", "os", ".", "getcwd", "(", ")", "who", "=", "getpass", ".", "getuser", "(", ")", "self", ".", "_make_outdirs", "(", ")", "append_notices", "=", "\"\"", "if", "hasattr", "(", "self", ",", "'opt_end'", ")", ":", "append_notices", "=", "\". shortened push, only to %s stage\"", "%", "self", ".", "opt_end", "if", "self", ".", "is_devel", ":", "append_notices", "+=", "\". devel build\"", "if", "hasattr", "(", "self", ",", "'append_notices'", ")", ":", "append_notices", "+=", "self", ".", "append_notices", "line", "=", "\"%s %s %s by %s%s\"", "%", "(", "sys", ".", "argv", "[", "0", "]", ",", "self", ".", "version", ",", "self", ".", "release_environment", ",", "who", ",", "append_notices", ")", "b", "=", "'deploy begin %s'", "%", "line", "e", "=", "'deploy done %s'", "%", "line", "if", "self", ".", "chatty", ":", "self", ".", "alact", "(", "b", ")", "ok", "=", "False", "stage_passed", "=", "None", "try", ":", "for", "stage", "in", "self", ".", "stages", "[", "self", ".", "stage_start", ":", "self", ".", "stage_end", "]", ":", "self", ".", "debug_msg", "(", "\"stage %s starting\"", "%", "(", "stage", ",", ")", ")", "getattr", "(", "self", ",", "stage", ")", "(", ")", "self", ".", "chdir", "(", "cwd", ")", "stage_passed", "=", "stage", "self", ".", "debug_msg", "(", "\"stage %s complete\"", "%", "(", "stage", ",", ")", ")", "ok", "=", "True", "finally", ":", "if", "not", "ok", ":", "if", "self", ".", "chatty", ":", "if", "not", "stage_passed", ":", "self", ".", "alact", "(", "'deploy failed %s. completed no stages'", "%", "line", ")", "else", ":", "self", ".", "alact", "(", "'deploy failed %s. completed %s'", "%", "(", "line", ",", "stage_passed", ")", ")", "self", ".", "status_msg", "(", "'[OK]'", ")", "if", "self", ".", "chatty", ":", "self", ".", "alact", "(", "'%s in %0.3f sec'", "%", "(", "e", ",", "time", ".", "time", "(", ")", "-", "time_start", ")", ")", "return", "0" ]
run all configured stages
[ "run", "all", "configured", "stages" ]
train
https://github.com/20c/facsimile/blob/570e28568475d5be1b1a2c95b8e941fbfbc167eb/facsimile/base.py#L695-L753
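run dispatches each configured stage name to a method of the same name via getattr; the dispatch pattern on its own, as a tiny self-contained illustration (not the real facsimile Base class):

class MiniBuild:
    stages = ["checkout", "build", "package"]

    def checkout(self):
        print("checkout stage")

    def build(self):
        print("build stage")

    def package(self):
        print("package stage")

    def run(self):
        # Same core loop as Base.run: look the stage method up by name and call it.
        for stage in self.stages:
            getattr(self, stage)()

MiniBuild().run()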
jaraco/backports.datetime_timestamp
backports/datetime_timestamp/__init__.py
timestamp
def timestamp(dt): """ Return POSIX timestamp as float. >>> timestamp(datetime.datetime.now()) > 1494638812 True >>> timestamp(datetime.datetime.now()) % 1 > 0 True """ if dt.tzinfo is None: return time.mktime(( dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, -1, -1, -1)) + dt.microsecond / 1e6 else: return (dt - _EPOCH).total_seconds()
python
def timestamp(dt): """ Return POSIX timestamp as float. >>> timestamp(datetime.datetime.now()) > 1494638812 True >>> timestamp(datetime.datetime.now()) % 1 > 0 True """ if dt.tzinfo is None: return time.mktime(( dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, -1, -1, -1)) + dt.microsecond / 1e6 else: return (dt - _EPOCH).total_seconds()
[ "def", "timestamp", "(", "dt", ")", ":", "if", "dt", ".", "tzinfo", "is", "None", ":", "return", "time", ".", "mktime", "(", "(", "dt", ".", "year", ",", "dt", ".", "month", ",", "dt", ".", "day", ",", "dt", ".", "hour", ",", "dt", ".", "minute", ",", "dt", ".", "second", ",", "-", "1", ",", "-", "1", ",", "-", "1", ")", ")", "+", "dt", ".", "microsecond", "/", "1e6", "else", ":", "return", "(", "dt", "-", "_EPOCH", ")", ".", "total_seconds", "(", ")" ]
Return POSIX timestamp as float. >>> timestamp(datetime.datetime.now()) > 1494638812 True >>> timestamp(datetime.datetime.now()) % 1 > 0 True
[ "Return", "POSIX", "timestamp", "as", "float", "." ]
train
https://github.com/jaraco/backports.datetime_timestamp/blob/914d3f75b3f8818b8dd4ac1863698917be2354ca/backports/datetime_timestamp/__init__.py#L24-L40
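The backported timestamp helper can be exercised directly; the import path follows the package layout shown in the record (backports/datetime_timestamp/__init__.py).

import datetime
from backports.datetime_timestamp import timestamp

naive = datetime.datetime(2017, 5, 13, 1, 26, 52)
aware = datetime.datetime(2017, 5, 13, 1, 26, 52, tzinfo=datetime.timezone.utc)

print(timestamp(naive))  # interpreted as local time via time.mktime
print(timestamp(aware))  # 1494638812.0, seconds since the UTC epoch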
alixnovosi/botskeleton
botskeleton/botskeleton.py
rate_limited
def rate_limited(max_per_hour: int, *args: Any) -> Callable[..., Any]: """Rate limit a function.""" return util.rate_limited(max_per_hour, *args)
python
def rate_limited(max_per_hour: int, *args: Any) -> Callable[..., Any]: """Rate limit a function.""" return util.rate_limited(max_per_hour, *args)
[ "def", "rate_limited", "(", "max_per_hour", ":", "int", ",", "*", "args", ":", "Any", ")", "->", "Callable", "[", "...", ",", "Any", "]", ":", "return", "util", ".", "rate_limited", "(", "max_per_hour", ",", "*", "args", ")" ]
Rate limit a function.
[ "Rate", "limit", "a", "function", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L492-L494
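rate_limited simply forwards to util.rate_limited; its exact calling convention is defined there and is not shown in this record, so only the thin-wrapper call itself can be sketched.

# Assumed: util.rate_limited accepts the same (max_per_hour, *args) it is handed here.
from botskeleton.botskeleton import rate_limited

limiter = rate_limited(60)  # at most 60 calls per hour, per the parameter name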
alixnovosi/botskeleton
botskeleton/botskeleton.py
_repair
def _repair(record: Dict[str, Any]) -> Dict[str, Any]: """Repair a corrupted IterationRecord with a specific known issue.""" output_records = record.get("output_records") if record.get("_type", None) == "IterationRecord" and output_records is not None: birdsite_record = output_records.get("birdsite") # check for the bug if isinstance(birdsite_record, dict) and birdsite_record.get("_type") == "IterationRecord": # get to the bottom of the corrupted record failed = False while birdsite_record.get("_type") == "IterationRecord": sub_record = birdsite_record.get("output_records") if sub_record is None: failed = True break birdsite_record = sub_record.get("birdsite") if birdsite_record is None: failed = True break if failed: return record # add type birdsite_record["_type"] = TweetRecord.__name__ # lift extra keys, just in case if "extra_keys" in birdsite_record: record_extra_values = record.get("extra_keys", {}) for key, value in birdsite_record["extra_keys"].items(): if key not in record_extra_values: record_extra_values[key] = value record["extra_keys"] = record_extra_values del birdsite_record["extra_keys"] output_records["birdsite"] = birdsite_record # pull that correct record up to the top level, fixing corruption record["output_records"] = output_records return record
python
def _repair(record: Dict[str, Any]) -> Dict[str, Any]: """Repair a corrupted IterationRecord with a specific known issue.""" output_records = record.get("output_records") if record.get("_type", None) == "IterationRecord" and output_records is not None: birdsite_record = output_records.get("birdsite") # check for the bug if isinstance(birdsite_record, dict) and birdsite_record.get("_type") == "IterationRecord": # get to the bottom of the corrupted record failed = False while birdsite_record.get("_type") == "IterationRecord": sub_record = birdsite_record.get("output_records") if sub_record is None: failed = True break birdsite_record = sub_record.get("birdsite") if birdsite_record is None: failed = True break if failed: return record # add type birdsite_record["_type"] = TweetRecord.__name__ # lift extra keys, just in case if "extra_keys" in birdsite_record: record_extra_values = record.get("extra_keys", {}) for key, value in birdsite_record["extra_keys"].items(): if key not in record_extra_values: record_extra_values[key] = value record["extra_keys"] = record_extra_values del birdsite_record["extra_keys"] output_records["birdsite"] = birdsite_record # pull that correct record up to the top level, fixing corruption record["output_records"] = output_records return record
[ "def", "_repair", "(", "record", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "output_records", "=", "record", ".", "get", "(", "\"output_records\"", ")", "if", "record", ".", "get", "(", "\"_type\"", ",", "None", ")", "==", "\"IterationRecord\"", "and", "output_records", "is", "not", "None", ":", "birdsite_record", "=", "output_records", ".", "get", "(", "\"birdsite\"", ")", "# check for the bug", "if", "isinstance", "(", "birdsite_record", ",", "dict", ")", "and", "birdsite_record", ".", "get", "(", "\"_type\"", ")", "==", "\"IterationRecord\"", ":", "# get to the bottom of the corrupted record", "failed", "=", "False", "while", "birdsite_record", ".", "get", "(", "\"_type\"", ")", "==", "\"IterationRecord\"", ":", "sub_record", "=", "birdsite_record", ".", "get", "(", "\"output_records\"", ")", "if", "sub_record", "is", "None", ":", "failed", "=", "True", "break", "birdsite_record", "=", "sub_record", ".", "get", "(", "\"birdsite\"", ")", "if", "birdsite_record", "is", "None", ":", "failed", "=", "True", "break", "if", "failed", ":", "return", "record", "# add type", "birdsite_record", "[", "\"_type\"", "]", "=", "TweetRecord", ".", "__name__", "# lift extra keys, just in case", "if", "\"extra_keys\"", "in", "birdsite_record", ":", "record_extra_values", "=", "record", ".", "get", "(", "\"extra_keys\"", ",", "{", "}", ")", "for", "key", ",", "value", "in", "birdsite_record", "[", "\"extra_keys\"", "]", ".", "items", "(", ")", ":", "if", "key", "not", "in", "record_extra_values", ":", "record_extra_values", "[", "key", "]", "=", "value", "record", "[", "\"extra_keys\"", "]", "=", "record_extra_values", "del", "birdsite_record", "[", "\"extra_keys\"", "]", "output_records", "[", "\"birdsite\"", "]", "=", "birdsite_record", "# pull that correct record up to the top level, fixing corruption", "record", "[", "\"output_records\"", "]", "=", "output_records", "return", "record" ]
Repair a corrupted IterationRecord with a specific known issue.
[ "Repair", "a", "corrupted", "IterationRecord", "with", "a", "specific", "known", "issue", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L510-L554
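_repair targets one specific corruption shape: an IterationRecord whose birdsite entry is itself wrapped in nested IterationRecord dicts. A runnable sketch of that shape; importing the module-private helper is done here only for the demonstration.

from botskeleton.botskeleton import _repair  # private helper, imported for the demo

corrupted = {
    "_type": "IterationRecord",
    "output_records": {
        "birdsite": {
            "_type": "IterationRecord",
            "output_records": {
                "birdsite": {"tweet_id": 1, "text": "hi", "extra_keys": {"foo": "bar"}},
            },
        },
    },
}

repaired = _repair(corrupted)
print(repaired["output_records"]["birdsite"]["_type"])  # TweetRecord
print(repaired["extra_keys"])                           # {'foo': 'bar'}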
alixnovosi/botskeleton
botskeleton/botskeleton.py
IterationRecord.from_dict
def from_dict(cls, obj_dict: Dict[str, Any]) -> "IterationRecord": """Get object back from dict.""" obj = cls() for key, item in obj_dict.items(): obj.__dict__[key] = item return obj
python
def from_dict(cls, obj_dict: Dict[str, Any]) -> "IterationRecord": """Get object back from dict.""" obj = cls() for key, item in obj_dict.items(): obj.__dict__[key] = item return obj
[ "def", "from_dict", "(", "cls", ",", "obj_dict", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "\"IterationRecord\"", ":", "obj", "=", "cls", "(", ")", "for", "key", ",", "item", "in", "obj_dict", ".", "items", "(", ")", ":", "obj", ".", "__dict__", "[", "key", "]", "=", "item", "return", "obj" ]
Get object back from dict.
[ "Get", "object", "back", "from", "dict", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L48-L54
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.send
def send( self, *args: str, text: str=None, ) -> IterationRecord: """ Post text-only to all outputs. :param args: positional arguments. expected: text to send as message in post. keyword text argument is preferred over this. :param text: text to send as message in post. :returns: new record of iteration """ if text is not None: final_text = text else: if len(args) == 0: raise BotSkeletonException(("Please provide text either as a positional arg or " "as a keyword arg (text=TEXT)")) else: final_text = args[0] # TODO there could be some annotation stuff here. record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling send on it.") entry: Any = output["obj"] output_result = entry.send(text=final_text) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending.") self.history.append(record) self.update_history() return record
python
def send( self, *args: str, text: str=None, ) -> IterationRecord: """ Post text-only to all outputs. :param args: positional arguments. expected: text to send as message in post. keyword text argument is preferred over this. :param text: text to send as message in post. :returns: new record of iteration """ if text is not None: final_text = text else: if len(args) == 0: raise BotSkeletonException(("Please provide text either as a positional arg or " "as a keyword arg (text=TEXT)")) else: final_text = args[0] # TODO there could be some annotation stuff here. record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling send on it.") entry: Any = output["obj"] output_result = entry.send(text=final_text) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending.") self.history.append(record) self.update_history() return record
[ "def", "send", "(", "self", ",", "*", "args", ":", "str", ",", "text", ":", "str", "=", "None", ",", ")", "->", "IterationRecord", ":", "if", "text", "is", "not", "None", ":", "final_text", "=", "text", "else", ":", "if", "len", "(", "args", ")", "==", "0", ":", "raise", "BotSkeletonException", "(", "(", "\"Please provide text either as a positional arg or \"", "\"as a keyword arg (text=TEXT)\"", ")", ")", "else", ":", "final_text", "=", "args", "[", "0", "]", "# TODO there could be some annotation stuff here.", "record", "=", "IterationRecord", "(", "extra_keys", "=", "self", ".", "extra_keys", ")", "for", "key", ",", "output", "in", "self", ".", "outputs", ".", "items", "(", ")", ":", "if", "output", "[", "\"active\"", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is active, calling send on it.\"", ")", "entry", ":", "Any", "=", "output", "[", "\"obj\"", "]", "output_result", "=", "entry", ".", "send", "(", "text", "=", "final_text", ")", "record", ".", "output_records", "[", "key", "]", "=", "output_result", "else", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is inactive. Not sending.\"", ")", "self", ".", "history", ".", "append", "(", "record", ")", "self", ".", "update_history", "(", ")", "return", "record" ]
Post text-only to all outputs. :param args: positional arguments. expected: text to send as message in post. keyword text argument is preferred over this. :param text: text to send as message in post. :returns: new record of iteration
[ "Post", "text", "-", "only", "to", "all", "outputs", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L102-L140
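The keyword-over-positional text resolution in send() can be shown in isolation; a small sketch of the same argument handling, with ValueError standing in for BotSkeletonException.

def resolve_text(*args: str, text: str = None) -> str:
    # Mirrors send(): prefer the keyword, fall back to the first positional arg.
    if text is not None:
        return text
    if len(args) == 0:
        raise ValueError("Please provide text either as a positional arg or "
                         "as a keyword arg (text=TEXT)")
    return args[0]

print(resolve_text("hello"))       # hello
print(resolve_text(text="hello"))  # hello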
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.send_with_one_media
def send_with_one_media( self, *args: str, text: str=None, file: str=None, caption: str=None, ) -> IterationRecord: """ Post with one media item to all outputs. Provide filename so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. file to be uploaded. caption to be paired with file. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param file: file to be uploaded in post. :param caption: caption to be uploaded alongside file. :returns: new record of iteration """ final_text = text if final_text is None: if len(args) < 1: raise TypeError(("Please provide either positional argument " "TEXT, or keyword argument text=TEXT")) else: final_text = args[0] final_file = file if final_file is None: if len(args) < 2: raise TypeError(("Please provide either positional argument " "FILE, or keyword argument file=FILE")) else: final_file = args[1] # this arg is ACTUALLY optional, # so the pattern is changed. final_caption = caption if final_caption is None: if len(args) >= 3: final_caption = args[2] # TODO more error checking like this. if final_caption is None or final_caption == "": captions:List[str] = [] else: captions = [final_caption] record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling media send on it.") entry: Any = output["obj"] output_result = entry.send_with_media(text=final_text, files=[final_file], captions=captions) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending with media.") self.history.append(record) self.update_history() return record
python
def send_with_one_media( self, *args: str, text: str=None, file: str=None, caption: str=None, ) -> IterationRecord: """ Post with one media item to all outputs. Provide filename so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. file to be uploaded. caption to be paired with file. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param file: file to be uploaded in post. :param caption: caption to be uploaded alongside file. :returns: new record of iteration """ final_text = text if final_text is None: if len(args) < 1: raise TypeError(("Please provide either positional argument " "TEXT, or keyword argument text=TEXT")) else: final_text = args[0] final_file = file if final_file is None: if len(args) < 2: raise TypeError(("Please provide either positional argument " "FILE, or keyword argument file=FILE")) else: final_file = args[1] # this arg is ACTUALLY optional, # so the pattern is changed. final_caption = caption if final_caption is None: if len(args) >= 3: final_caption = args[2] # TODO more error checking like this. if final_caption is None or final_caption == "": captions:List[str] = [] else: captions = [final_caption] record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling media send on it.") entry: Any = output["obj"] output_result = entry.send_with_media(text=final_text, files=[final_file], captions=captions) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending with media.") self.history.append(record) self.update_history() return record
[ "def", "send_with_one_media", "(", "self", ",", "*", "args", ":", "str", ",", "text", ":", "str", "=", "None", ",", "file", ":", "str", "=", "None", ",", "caption", ":", "str", "=", "None", ",", ")", "->", "IterationRecord", ":", "final_text", "=", "text", "if", "final_text", "is", "None", ":", "if", "len", "(", "args", ")", "<", "1", ":", "raise", "TypeError", "(", "(", "\"Please provide either positional argument \"", "\"TEXT, or keyword argument text=TEXT\"", ")", ")", "else", ":", "final_text", "=", "args", "[", "0", "]", "final_file", "=", "file", "if", "final_file", "is", "None", ":", "if", "len", "(", "args", ")", "<", "2", ":", "raise", "TypeError", "(", "(", "\"Please provide either positional argument \"", "\"FILE, or keyword argument file=FILE\"", ")", ")", "else", ":", "final_file", "=", "args", "[", "1", "]", "# this arg is ACTUALLY optional,", "# so the pattern is changed.", "final_caption", "=", "caption", "if", "final_caption", "is", "None", ":", "if", "len", "(", "args", ")", ">=", "3", ":", "final_caption", "=", "args", "[", "2", "]", "# TODO more error checking like this.", "if", "final_caption", "is", "None", "or", "final_caption", "==", "\"\"", ":", "captions", ":", "List", "[", "str", "]", "=", "[", "]", "else", ":", "captions", "=", "[", "final_caption", "]", "record", "=", "IterationRecord", "(", "extra_keys", "=", "self", ".", "extra_keys", ")", "for", "key", ",", "output", "in", "self", ".", "outputs", ".", "items", "(", ")", ":", "if", "output", "[", "\"active\"", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is active, calling media send on it.\"", ")", "entry", ":", "Any", "=", "output", "[", "\"obj\"", "]", "output_result", "=", "entry", ".", "send_with_media", "(", "text", "=", "final_text", ",", "files", "=", "[", "final_file", "]", ",", "captions", "=", "captions", ")", "record", ".", "output_records", "[", "key", "]", "=", "output_result", "else", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is inactive. Not sending with media.\"", ")", "self", ".", "history", ".", "append", "(", "record", ")", "self", ".", "update_history", "(", ")", "return", "record" ]
Post with one media item to all outputs. Provide filename so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. file to be uploaded. caption to be paired with file. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param file: file to be uploaded in post. :param caption: caption to be uploaded alongside file. :returns: new record of iteration
[ "Post", "with", "one", "media", "item", "to", "all", "outputs", ".", "Provide", "filename", "so", "outputs", "can", "handle", "their", "own", "uploads", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L142-L208
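The caption handling in send_with_one_media() reduces to a small normalization step before the per-output send_with_media calls; a standalone sketch.

from typing import List, Optional

def normalize_caption(caption: Optional[str]) -> List[str]:
    # Empty or missing captions become an empty list, otherwise a one-item list,
    # matching what send_with_one_media() hands to each output.
    if caption is None or caption == "":
        return []
    return [caption]

print(normalize_caption(None))     # []
print(normalize_caption(""))       # []
print(normalize_caption("a cat"))  # ['a cat']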
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.send_with_many_media
def send_with_many_media( self, *args: str, text: str=None, files: List[str]=None, captions: List[str]=[], ) -> IterationRecord: """ Post with several media. Provide filenames so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. files to be uploaded. captions to be paired with files. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param files: files to be uploaded in post. :param captions: captions to be uploaded alongside files. :returns: new record of iteration """ if text is None: if len(args) < 1: raise TypeError(("Please provide either required positional argument " "TEXT, or keyword argument text=TEXT")) else: final_text = args[0] else: final_text = text if files is None: if len(args) < 2: raise TypeError(("Please provide either positional argument " "FILES, or keyword argument files=FILES")) else: final_files = list(args[1:]) else: final_files = files # captions have never been permitted to be provided as positional args # (kind of backed myself into that) # so they just get defaulted and it's fine. record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling media send on it.") entry: Any = output["obj"] output_result = entry.send_with_media(text=final_text, files=final_files, captions=captions) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending with media.") self.history.append(record) self.update_history() return record
python
def send_with_many_media( self, *args: str, text: str=None, files: List[str]=None, captions: List[str]=[], ) -> IterationRecord: """ Post with several media. Provide filenames so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. files to be uploaded. captions to be paired with files. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param files: files to be uploaded in post. :param captions: captions to be uploaded alongside files. :returns: new record of iteration """ if text is None: if len(args) < 1: raise TypeError(("Please provide either required positional argument " "TEXT, or keyword argument text=TEXT")) else: final_text = args[0] else: final_text = text if files is None: if len(args) < 2: raise TypeError(("Please provide either positional argument " "FILES, or keyword argument files=FILES")) else: final_files = list(args[1:]) else: final_files = files # captions have never been permitted to be provided as positional args # (kind of backed myself into that) # so they just get defaulted and it's fine. record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if output["active"]: self.log.info(f"Output {key} is active, calling media send on it.") entry: Any = output["obj"] output_result = entry.send_with_media(text=final_text, files=final_files, captions=captions) record.output_records[key] = output_result else: self.log.info(f"Output {key} is inactive. Not sending with media.") self.history.append(record) self.update_history() return record
[ "def", "send_with_many_media", "(", "self", ",", "*", "args", ":", "str", ",", "text", ":", "str", "=", "None", ",", "files", ":", "List", "[", "str", "]", "=", "None", ",", "captions", ":", "List", "[", "str", "]", "=", "[", "]", ",", ")", "->", "IterationRecord", ":", "if", "text", "is", "None", ":", "if", "len", "(", "args", ")", "<", "1", ":", "raise", "TypeError", "(", "(", "\"Please provide either required positional argument \"", "\"TEXT, or keyword argument text=TEXT\"", ")", ")", "else", ":", "final_text", "=", "args", "[", "0", "]", "else", ":", "final_text", "=", "text", "if", "files", "is", "None", ":", "if", "len", "(", "args", ")", "<", "2", ":", "raise", "TypeError", "(", "(", "\"Please provide either positional argument \"", "\"FILES, or keyword argument files=FILES\"", ")", ")", "else", ":", "final_files", "=", "list", "(", "args", "[", "1", ":", "]", ")", "else", ":", "final_files", "=", "files", "# captions have never been permitted to be provided as positional args", "# (kind of backed myself into that)", "# so they just get defaulted and it's fine.", "record", "=", "IterationRecord", "(", "extra_keys", "=", "self", ".", "extra_keys", ")", "for", "key", ",", "output", "in", "self", ".", "outputs", ".", "items", "(", ")", ":", "if", "output", "[", "\"active\"", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is active, calling media send on it.\"", ")", "entry", ":", "Any", "=", "output", "[", "\"obj\"", "]", "output_result", "=", "entry", ".", "send_with_media", "(", "text", "=", "final_text", ",", "files", "=", "final_files", ",", "captions", "=", "captions", ")", "record", ".", "output_records", "[", "key", "]", "=", "output_result", "else", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is inactive. Not sending with media.\"", ")", "self", ".", "history", ".", "append", "(", "record", ")", "self", ".", "update_history", "(", ")", "return", "record" ]
Post with several media. Provide filenames so outputs can handle their own uploads. :param args: positional arguments. expected: text to send as message in post. files to be uploaded. captions to be paired with files. keyword arguments preferred over positional ones. :param text: text to send as message in post. :param files: files to be uploaded in post. :param captions: captions to be uploaded alongside files. :returns: new record of iteration
[ "Post", "with", "several", "media", ".", "Provide", "filenames", "so", "outputs", "can", "handle", "their", "own", "uploads", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L210-L269
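How send_with_many_media() gathers files when they arrive positionally: the first positional argument is the text, the rest are files. A standalone sketch of the same slicing, with hypothetical file names.

from typing import List

def gather(*args: str, text: str = None, files: List[str] = None):
    # Mirrors send_with_many_media(): keyword args win, otherwise args[0] is the
    # text and everything after it is treated as a file.
    final_text = text if text is not None else args[0]
    final_files = files if files is not None else list(args[1:])
    return final_text, final_files

print(gather("hello", "a.png", "b.png"))      # ('hello', ['a.png', 'b.png'])
print(gather(text="hello", files=["a.png"]))  # ('hello', ['a.png'])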
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.perform_batch_reply
def perform_batch_reply( self, *, callback: Callable[..., str]=None, target_handles: Dict[str, str]=None, lookback_limit: int=20, per_service_lookback_limit: Dict[str, int]=None, ) -> IterationRecord: """ Performs batch reply on target accounts. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param targets: a dictionary of service names to target handles (currently only one per service). :param lookback_limit: a lookback limit of how many messages to consider (optional). :param per_service_lookback: and a dictionary of service names to per-service lookback limits. takes preference over lookback_limit (optional). :returns: new record of iteration :raises BotSkeletonException: raises BotSkeletonException if batch reply fails or cannot be performed """ if callback is None: raise BotSkeletonException("Callback must be provided.""") if target_handles is None: raise BotSkeletonException("Targets must be provided.""") if lookback_limit > self.lookback_limit: raise BotSkeletonException( f"Lookback_limit cannot exceed {self.lookback_limit}, " + f"but it was {lookback_limit}" ) # use per-service lookback dict for convenience in a moment. # if necessary, use lookback_limit to fill it out. lookback_dict = per_service_lookback_limit if (lookback_dict is None): lookback_dict = {} record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if key not in lookback_dict: lookback_dict[key] = lookback_limit if target_handles.get(key, None) is None: self.log.info(f"No target for output {key}, skipping this output.") elif not output.get("active", False): self.log.info(f"Output {key} is inactive. Not calling batch reply.") elif output["active"]: self.log.info(f"Output {key} is active, calling batch reply on it.") entry: Any = output["obj"] output_result = entry.perform_batch_reply(callback=callback, target_handle=target_handles[key], lookback_limit=lookback_dict[key], ) record.output_records[key] = output_result self.history.append(record) self.update_history() return record
python
def perform_batch_reply( self, *, callback: Callable[..., str]=None, target_handles: Dict[str, str]=None, lookback_limit: int=20, per_service_lookback_limit: Dict[str, int]=None, ) -> IterationRecord: """ Performs batch reply on target accounts. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param targets: a dictionary of service names to target handles (currently only one per service). :param lookback_limit: a lookback limit of how many messages to consider (optional). :param per_service_lookback: and a dictionary of service names to per-service lookback limits. takes preference over lookback_limit (optional). :returns: new record of iteration :raises BotSkeletonException: raises BotSkeletonException if batch reply fails or cannot be performed """ if callback is None: raise BotSkeletonException("Callback must be provided.""") if target_handles is None: raise BotSkeletonException("Targets must be provided.""") if lookback_limit > self.lookback_limit: raise BotSkeletonException( f"Lookback_limit cannot exceed {self.lookback_limit}, " + f"but it was {lookback_limit}" ) # use per-service lookback dict for convenience in a moment. # if necessary, use lookback_limit to fill it out. lookback_dict = per_service_lookback_limit if (lookback_dict is None): lookback_dict = {} record = IterationRecord(extra_keys=self.extra_keys) for key, output in self.outputs.items(): if key not in lookback_dict: lookback_dict[key] = lookback_limit if target_handles.get(key, None) is None: self.log.info(f"No target for output {key}, skipping this output.") elif not output.get("active", False): self.log.info(f"Output {key} is inactive. Not calling batch reply.") elif output["active"]: self.log.info(f"Output {key} is active, calling batch reply on it.") entry: Any = output["obj"] output_result = entry.perform_batch_reply(callback=callback, target_handle=target_handles[key], lookback_limit=lookback_dict[key], ) record.output_records[key] = output_result self.history.append(record) self.update_history() return record
[ "def", "perform_batch_reply", "(", "self", ",", "*", ",", "callback", ":", "Callable", "[", "...", ",", "str", "]", "=", "None", ",", "target_handles", ":", "Dict", "[", "str", ",", "str", "]", "=", "None", ",", "lookback_limit", ":", "int", "=", "20", ",", "per_service_lookback_limit", ":", "Dict", "[", "str", ",", "int", "]", "=", "None", ",", ")", "->", "IterationRecord", ":", "if", "callback", "is", "None", ":", "raise", "BotSkeletonException", "(", "\"Callback must be provided.\"", "\"\"", ")", "if", "target_handles", "is", "None", ":", "raise", "BotSkeletonException", "(", "\"Targets must be provided.\"", "\"\"", ")", "if", "lookback_limit", ">", "self", ".", "lookback_limit", ":", "raise", "BotSkeletonException", "(", "f\"Lookback_limit cannot exceed {self.lookback_limit}, \"", "+", "f\"but it was {lookback_limit}\"", ")", "# use per-service lookback dict for convenience in a moment.", "# if necessary, use lookback_limit to fill it out.", "lookback_dict", "=", "per_service_lookback_limit", "if", "(", "lookback_dict", "is", "None", ")", ":", "lookback_dict", "=", "{", "}", "record", "=", "IterationRecord", "(", "extra_keys", "=", "self", ".", "extra_keys", ")", "for", "key", ",", "output", "in", "self", ".", "outputs", ".", "items", "(", ")", ":", "if", "key", "not", "in", "lookback_dict", ":", "lookback_dict", "[", "key", "]", "=", "lookback_limit", "if", "target_handles", ".", "get", "(", "key", ",", "None", ")", "is", "None", ":", "self", ".", "log", ".", "info", "(", "f\"No target for output {key}, skipping this output.\"", ")", "elif", "not", "output", ".", "get", "(", "\"active\"", ",", "False", ")", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is inactive. Not calling batch reply.\"", ")", "elif", "output", "[", "\"active\"", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Output {key} is active, calling batch reply on it.\"", ")", "entry", ":", "Any", "=", "output", "[", "\"obj\"", "]", "output_result", "=", "entry", ".", "perform_batch_reply", "(", "callback", "=", "callback", ",", "target_handle", "=", "target_handles", "[", "key", "]", ",", "lookback_limit", "=", "lookback_dict", "[", "key", "]", ",", ")", "record", ".", "output_records", "[", "key", "]", "=", "output_result", "self", ".", "history", ".", "append", "(", "record", ")", "self", ".", "update_history", "(", ")", "return", "record" ]
Performs batch reply on target accounts. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target_handles: a dictionary of service names to target handles (currently only one per service). :param lookback_limit: a lookback limit of how many messages to consider (optional). :param per_service_lookback_limit: a dictionary of service names to per-service lookback limits. Takes preference over lookback_limit (optional). :returns: new record of iteration :raises BotSkeletonException: raises BotSkeletonException if batch reply fails or cannot be performed
[ "Performs", "batch", "reply", "on", "target", "accounts", ".", "Looks", "up", "the", "recent", "messages", "of", "the", "target", "user", "applies", "the", "callback", "and", "replies", "with", "what", "the", "callback", "generates", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L271-L341
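The per-service lookback resolution in perform_batch_reply() amounts to filling missing service keys with the global default; a sketch with hypothetical service names.

from typing import Dict

def resolve_lookback(outputs, lookback_limit: int,
                     per_service: Dict[str, int] = None) -> Dict[str, int]:
    # Mirrors perform_batch_reply(): per-service values win, the global limit fills gaps.
    lookback = dict(per_service or {})
    for key in outputs:
        lookback.setdefault(key, lookback_limit)
    return lookback

print(resolve_lookback(["birdsite", "mastodon"], 20, {"mastodon": 5}))
# {'mastodon': 5, 'birdsite': 20}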
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.nap
def nap(self) -> None: """ Go to sleep for the duration of self.delay. :returns: None """ self.log.info(f"Sleeping for {self.delay} seconds.") for _ in progress.bar(range(self.delay)): time.sleep(1)
python
def nap(self) -> None: """ Go to sleep for the duration of self.delay. :returns: None """ self.log.info(f"Sleeping for {self.delay} seconds.") for _ in progress.bar(range(self.delay)): time.sleep(1)
[ "def", "nap", "(", "self", ")", "->", "None", ":", "self", ".", "log", ".", "info", "(", "f\"Sleeping for {self.delay} seconds.\"", ")", "for", "_", "in", "progress", ".", "bar", "(", "range", "(", "self", ".", "delay", ")", ")", ":", "time", ".", "sleep", "(", "1", ")" ]
Go to sleep for the duration of self.delay. :returns: None
[ "Go", "to", "sleep", "for", "the", "duration", "of", "self", ".", "delay", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L343-L351
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.store_extra_info
def store_extra_info(self, key: str, value: Any) -> None: """ Store some extra value in the messaging storage. :param key: key of dictionary entry to add. :param value: value of dictionary entry to add. :returns: None """ self.extra_keys[key] = value
python
def store_extra_info(self, key: str, value: Any) -> None: """ Store some extra value in the messaging storage. :param key: key of dictionary entry to add. :param value: value of dictionary entry to add. :returns: None """ self.extra_keys[key] = value
[ "def", "store_extra_info", "(", "self", ",", "key", ":", "str", ",", "value", ":", "Any", ")", "->", "None", ":", "self", ".", "extra_keys", "[", "key", "]", "=", "value" ]
Store some extra value in the messaging storage. :param key: key of dictionary entry to add. :param value: value of dictionary entry to add. :returns: None
[ "Store", "some", "extra", "value", "in", "the", "messaging", "storage", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L353-L361
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.store_extra_keys
def store_extra_keys(self, d: Dict[str, Any]) -> None: """ Store several extra values in the messaging storage. :param d: dictionary entry to merge with current self.extra_keys. :returns: None """ new_dict = dict(self.extra_keys, **d) self.extra_keys = new_dict.copy()
python
def store_extra_keys(self, d: Dict[str, Any]) -> None: """ Store several extra values in the messaging storage. :param d: dictionary entry to merge with current self.extra_keys. :returns: None """ new_dict = dict(self.extra_keys, **d) self.extra_keys = new_dict.copy()
[ "def", "store_extra_keys", "(", "self", ",", "d", ":", "Dict", "[", "str", ",", "Any", "]", ")", "->", "None", ":", "new_dict", "=", "dict", "(", "self", ".", "extra_keys", ",", "*", "*", "d", ")", "self", ".", "extra_keys", "=", "new_dict", ".", "copy", "(", ")" ]
Store several extra values in the messaging storage. :param d: dictionary entry to merge with current self.extra_keys. :returns: None
[ "Store", "several", "extra", "values", "in", "the", "messaging", "storage", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L363-L371
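store_extra_keys() relies on dict(a, **b) merge semantics, where keys from the incoming dictionary overwrite existing ones; a quick demonstration with hypothetical keys.

existing = {"alt_text": "old", "lat": 1.0}
incoming = {"alt_text": "new", "lon": 2.0}
merged = dict(existing, **incoming)
print(merged)  # {'alt_text': 'new', 'lat': 1.0, 'lon': 2.0}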
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.update_history
def update_history(self) -> None: """ Update messaging history on disk. :returns: None """ self.log.debug(f"Saving history. History is: \n{self.history}") jsons = [] for item in self.history: json_item = item.__dict__ # Convert sub-entries into JSON as well. json_item["output_records"] = self._parse_output_records(item) jsons.append(json_item) if not path.isfile(self.history_filename): open(self.history_filename, "a+").close() with open(self.history_filename, "w") as f: json.dump(jsons, f, default=lambda x: x.__dict__.copy(), sort_keys=True, indent=4) f.write("\n")
python
def update_history(self) -> None: """ Update messaging history on disk. :returns: None """ self.log.debug(f"Saving history. History is: \n{self.history}") jsons = [] for item in self.history: json_item = item.__dict__ # Convert sub-entries into JSON as well. json_item["output_records"] = self._parse_output_records(item) jsons.append(json_item) if not path.isfile(self.history_filename): open(self.history_filename, "a+").close() with open(self.history_filename, "w") as f: json.dump(jsons, f, default=lambda x: x.__dict__.copy(), sort_keys=True, indent=4) f.write("\n")
[ "def", "update_history", "(", "self", ")", "->", "None", ":", "self", ".", "log", ".", "debug", "(", "f\"Saving history. History is: \\n{self.history}\"", ")", "jsons", "=", "[", "]", "for", "item", "in", "self", ".", "history", ":", "json_item", "=", "item", ".", "__dict__", "# Convert sub-entries into JSON as well.", "json_item", "[", "\"output_records\"", "]", "=", "self", ".", "_parse_output_records", "(", "item", ")", "jsons", ".", "append", "(", "json_item", ")", "if", "not", "path", ".", "isfile", "(", "self", ".", "history_filename", ")", ":", "open", "(", "self", ".", "history_filename", ",", "\"a+\"", ")", ".", "close", "(", ")", "with", "open", "(", "self", ".", "history_filename", ",", "\"w\"", ")", "as", "f", ":", "json", ".", "dump", "(", "jsons", ",", "f", ",", "default", "=", "lambda", "x", ":", "x", ".", "__dict__", ".", "copy", "(", ")", ",", "sort_keys", "=", "True", ",", "indent", "=", "4", ")", "f", ".", "write", "(", "\"\\n\"", ")" ]
Update messaging history on disk. :returns: None
[ "Update", "messaging", "history", "on", "disk", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L373-L395
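The default=lambda x: x.__dict__.copy() hook used by update_history() lets json serialize arbitrary record objects; a self-contained sketch with a stand-in class.

import json

class Stub:
    def __init__(self):
        self.timestamp = "2020-01-01"
        self.id = 42

print(json.dumps([{"output": Stub()}], default=lambda x: x.__dict__.copy(),
                 sort_keys=True, indent=4))
# The Stub instance is emitted as {"id": 42, "timestamp": "2020-01-01"}.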
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton.load_history
def load_history(self) -> List["IterationRecord"]: """ Load messaging history from disk to self. :returns: List of iteration records comprising history. """ if path.isfile(self.history_filename): with open(self.history_filename, "r") as f: try: dicts = json.load(f) except json.decoder.JSONDecodeError as e: self.log.error(f"Got error \n{e}\n decoding JSON history, overwriting it.\n" f"Former history available in {self.history_filename}.bak") copyfile(self.history_filename, f"{self.history_filename}.bak") return [] history: List[IterationRecord] = [] for hdict_pre in dicts: if "_type" in hdict_pre and hdict_pre["_type"] == IterationRecord.__name__: # repair any corrupted entries hdict = _repair(hdict_pre) record = IterationRecord.from_dict(hdict) history.append(record) # Be sure to handle legacy tweetrecord-only histories. # Assume anything without our new _type (which should have been there from the # start, whoops) is a legacy history. else: item = IterationRecord() # Lift extra keys up to upper record (if they exist). extra_keys = hdict_pre.pop("extra_keys", {}) item.extra_keys = extra_keys hdict_obj = TweetRecord.from_dict(hdict_pre) # Lift timestamp up to upper record. item.timestamp = hdict_obj.timestamp item.output_records["birdsite"] = hdict_obj history.append(item) self.log.debug(f"Loaded history:\n {history}") return history else: return []
python
def load_history(self) -> List["IterationRecord"]: """ Load messaging history from disk to self. :returns: List of iteration records comprising history. """ if path.isfile(self.history_filename): with open(self.history_filename, "r") as f: try: dicts = json.load(f) except json.decoder.JSONDecodeError as e: self.log.error(f"Got error \n{e}\n decoding JSON history, overwriting it.\n" f"Former history available in {self.history_filename}.bak") copyfile(self.history_filename, f"{self.history_filename}.bak") return [] history: List[IterationRecord] = [] for hdict_pre in dicts: if "_type" in hdict_pre and hdict_pre["_type"] == IterationRecord.__name__: # repair any corrupted entries hdict = _repair(hdict_pre) record = IterationRecord.from_dict(hdict) history.append(record) # Be sure to handle legacy tweetrecord-only histories. # Assume anything without our new _type (which should have been there from the # start, whoops) is a legacy history. else: item = IterationRecord() # Lift extra keys up to upper record (if they exist). extra_keys = hdict_pre.pop("extra_keys", {}) item.extra_keys = extra_keys hdict_obj = TweetRecord.from_dict(hdict_pre) # Lift timestamp up to upper record. item.timestamp = hdict_obj.timestamp item.output_records["birdsite"] = hdict_obj history.append(item) self.log.debug(f"Loaded history:\n {history}") return history else: return []
[ "def", "load_history", "(", "self", ")", "->", "List", "[", "\"IterationRecord\"", "]", ":", "if", "path", ".", "isfile", "(", "self", ".", "history_filename", ")", ":", "with", "open", "(", "self", ".", "history_filename", ",", "\"r\"", ")", "as", "f", ":", "try", ":", "dicts", "=", "json", ".", "load", "(", "f", ")", "except", "json", ".", "decoder", ".", "JSONDecodeError", "as", "e", ":", "self", ".", "log", ".", "error", "(", "f\"Got error \\n{e}\\n decoding JSON history, overwriting it.\\n\"", "f\"Former history available in {self.history_filename}.bak\"", ")", "copyfile", "(", "self", ".", "history_filename", ",", "f\"{self.history_filename}.bak\"", ")", "return", "[", "]", "history", ":", "List", "[", "IterationRecord", "]", "=", "[", "]", "for", "hdict_pre", "in", "dicts", ":", "if", "\"_type\"", "in", "hdict_pre", "and", "hdict_pre", "[", "\"_type\"", "]", "==", "IterationRecord", ".", "__name__", ":", "# repair any corrupted entries", "hdict", "=", "_repair", "(", "hdict_pre", ")", "record", "=", "IterationRecord", ".", "from_dict", "(", "hdict", ")", "history", ".", "append", "(", "record", ")", "# Be sure to handle legacy tweetrecord-only histories.", "# Assume anything without our new _type (which should have been there from the", "# start, whoops) is a legacy history.", "else", ":", "item", "=", "IterationRecord", "(", ")", "# Lift extra keys up to upper record (if they exist).", "extra_keys", "=", "hdict_pre", ".", "pop", "(", "\"extra_keys\"", ",", "{", "}", ")", "item", ".", "extra_keys", "=", "extra_keys", "hdict_obj", "=", "TweetRecord", ".", "from_dict", "(", "hdict_pre", ")", "# Lift timestamp up to upper record.", "item", ".", "timestamp", "=", "hdict_obj", ".", "timestamp", "item", ".", "output_records", "[", "\"birdsite\"", "]", "=", "hdict_obj", "history", ".", "append", "(", "item", ")", "self", ".", "log", ".", "debug", "(", "f\"Loaded history:\\n {history}\"", ")", "return", "history", "else", ":", "return", "[", "]" ]
Load messaging history from disk to self. :returns: List of iteration records comprising history.
[ "Load", "messaging", "history", "from", "disk", "to", "self", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L397-L447
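The recovery path in load_history() (back up an unreadable history file, then fall back to an empty history) can be sketched on its own; this is a simplified stand-in, not the full legacy-record handling.

import json
from shutil import copyfile

def load_or_reset(history_filename: str):
    # Mirrors load_history()'s error handling: keep a .bak copy of a corrupt
    # history file and return an empty history instead of crashing.
    try:
        with open(history_filename, "r") as f:
            return json.load(f)
    except json.decoder.JSONDecodeError:
        copyfile(history_filename, f"{history_filename}.bak")
        return []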
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton._setup_all_outputs
def _setup_all_outputs(self) -> None: """Set up all output methods. Provide them credentials and anything else they need.""" # The way this is gonna work is that we assume an output should be set up iff it has a # credentials_ directory under our secrets dir. for key in self.outputs.keys(): credentials_dir = path.join(self.secrets_dir, f"credentials_{key}") # special-case birdsite for historical reasons. if key == "birdsite" and not path.isdir(credentials_dir) \ and path.isfile(path.join(self.secrets_dir, "CONSUMER_KEY")): credentials_dir = self.secrets_dir if path.isdir(credentials_dir): output_skeleton = self.outputs[key] output_skeleton["active"] = True obj: Any = output_skeleton["obj"] obj.cred_init(secrets_dir=credentials_dir, log=self.log, bot_name=self.bot_name) output_skeleton["obj"] = obj self.outputs[key] = output_skeleton
python
def _setup_all_outputs(self) -> None: """Set up all output methods. Provide them credentials and anything else they need.""" # The way this is gonna work is that we assume an output should be set up iff it has a # credentials_ directory under our secrets dir. for key in self.outputs.keys(): credentials_dir = path.join(self.secrets_dir, f"credentials_{key}") # special-case birdsite for historical reasons. if key == "birdsite" and not path.isdir(credentials_dir) \ and path.isfile(path.join(self.secrets_dir, "CONSUMER_KEY")): credentials_dir = self.secrets_dir if path.isdir(credentials_dir): output_skeleton = self.outputs[key] output_skeleton["active"] = True obj: Any = output_skeleton["obj"] obj.cred_init(secrets_dir=credentials_dir, log=self.log, bot_name=self.bot_name) output_skeleton["obj"] = obj self.outputs[key] = output_skeleton
[ "def", "_setup_all_outputs", "(", "self", ")", "->", "None", ":", "# The way this is gonna work is that we assume an output should be set up iff it has a", "# credentials_ directory under our secrets dir.", "for", "key", "in", "self", ".", "outputs", ".", "keys", "(", ")", ":", "credentials_dir", "=", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "f\"credentials_{key}\"", ")", "# special-case birdsite for historical reasons.", "if", "key", "==", "\"birdsite\"", "and", "not", "path", ".", "isdir", "(", "credentials_dir", ")", "and", "path", ".", "isfile", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"CONSUMER_KEY\"", ")", ")", ":", "credentials_dir", "=", "self", ".", "secrets_dir", "if", "path", ".", "isdir", "(", "credentials_dir", ")", ":", "output_skeleton", "=", "self", ".", "outputs", "[", "key", "]", "output_skeleton", "[", "\"active\"", "]", "=", "True", "obj", ":", "Any", "=", "output_skeleton", "[", "\"obj\"", "]", "obj", ".", "cred_init", "(", "secrets_dir", "=", "credentials_dir", ",", "log", "=", "self", ".", "log", ",", "bot_name", "=", "self", ".", "bot_name", ")", "output_skeleton", "[", "\"obj\"", "]", "=", "obj", "self", ".", "outputs", "[", "key", "]", "=", "output_skeleton" ]
Set up all output methods. Provide them credentials and anything else they need.
[ "Set", "up", "all", "output", "methods", ".", "Provide", "them", "credentials", "and", "anything", "else", "they", "need", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L452-L475
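The discovery convention in _setup_all_outputs() (an output is active iff a credentials_<key> directory exists under the secrets directory) in a runnable sketch; the output names and layout are hypothetical.

import os
import tempfile

secrets_dir = tempfile.mkdtemp()
os.makedirs(os.path.join(secrets_dir, "credentials_mastodon"))

outputs = {"birdsite": {"active": False}, "mastodon": {"active": False}}
for key, output in outputs.items():
    credentials_dir = os.path.join(secrets_dir, f"credentials_{key}")
    output["active"] = os.path.isdir(credentials_dir)

print(outputs)  # only 'mastodon' ends up active in this layout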
alixnovosi/botskeleton
botskeleton/botskeleton.py
BotSkeleton._parse_output_records
def _parse_output_records(self, item: IterationRecord) -> Dict[str, Any]: """Parse output records into dicts ready for JSON.""" output_records = {} for key, sub_item in item.output_records.items(): if isinstance(sub_item, dict) or isinstance(sub_item, list): output_records[key] = sub_item else: output_records[key] = sub_item.__dict__ return output_records
python
def _parse_output_records(self, item: IterationRecord) -> Dict[str, Any]: """Parse output records into dicts ready for JSON.""" output_records = {} for key, sub_item in item.output_records.items(): if isinstance(sub_item, dict) or isinstance(sub_item, list): output_records[key] = sub_item else: output_records[key] = sub_item.__dict__ return output_records
[ "def", "_parse_output_records", "(", "self", ",", "item", ":", "IterationRecord", ")", "->", "Dict", "[", "str", ",", "Any", "]", ":", "output_records", "=", "{", "}", "for", "key", ",", "sub_item", "in", "item", ".", "output_records", ".", "items", "(", ")", ":", "if", "isinstance", "(", "sub_item", ",", "dict", ")", "or", "isinstance", "(", "sub_item", ",", "list", ")", ":", "output_records", "[", "key", "]", "=", "sub_item", "else", ":", "output_records", "[", "key", "]", "=", "sub_item", ".", "__dict__", "return", "output_records" ]
Parse output records into dicts ready for JSON.
[ "Parse", "output", "records", "into", "dicts", "ready", "for", "JSON", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/botskeleton.py#L477-L486
pmacosta/pmisc
pmisc/file.py
make_dir
def make_dir(fname): """ Create the directory of a fully qualified file name if it does not exist. :param fname: File name :type fname: string Equivalent to these Bash shell commands: .. code-block:: bash $ fname="${HOME}/mydir/myfile.txt" $ dir=$(dirname "${fname}") $ mkdir -p "${dir}" :param fname: Fully qualified file name :type fname: string """ file_path, fname = os.path.split(os.path.abspath(fname)) if not os.path.exists(file_path): os.makedirs(file_path)
python
def make_dir(fname): """ Create the directory of a fully qualified file name if it does not exist. :param fname: File name :type fname: string Equivalent to these Bash shell commands: .. code-block:: bash $ fname="${HOME}/mydir/myfile.txt" $ dir=$(dirname "${fname}") $ mkdir -p "${dir}" :param fname: Fully qualified file name :type fname: string """ file_path, fname = os.path.split(os.path.abspath(fname)) if not os.path.exists(file_path): os.makedirs(file_path)
[ "def", "make_dir", "(", "fname", ")", ":", "file_path", ",", "fname", "=", "os", ".", "path", ".", "split", "(", "os", ".", "path", ".", "abspath", "(", "fname", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "file_path", ")", ":", "os", ".", "makedirs", "(", "file_path", ")" ]
Create the directory of a fully qualified file name if it does not exist. Equivalent to these Bash shell commands: .. code-block:: bash $ fname="${HOME}/mydir/myfile.txt" $ dir=$(dirname "${fname}") $ mkdir -p "${dir}" :param fname: Fully qualified file name :type fname: string
[ "Create", "the", "directory", "of", "a", "fully", "qualified", "file", "name", "if", "it", "does", "not", "exist", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/file.py#L14-L34
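A short usage sketch for make_dir(); the path is hypothetical and rooted in a temp directory for safety, and the equivalent stdlib call is shown instead of importing pmisc.

import os
import tempfile

base = tempfile.mkdtemp()
target = os.path.join(base, "mydir", "subdir", "myfile.txt")
# make_dir(target) would create .../mydir/subdir; the equivalent stdlib call is:
os.makedirs(os.path.dirname(target), exist_ok=True)
print(os.path.isdir(os.path.dirname(target)))  # True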
pmacosta/pmisc
pmisc/file.py
normalize_windows_fname
def normalize_windows_fname(fname, _force=False): r""" Fix potential problems with a Microsoft Windows file name. Superfluous backslashes are removed and unintended escape sequences are converted to their equivalent (presumably correct and intended) representation, for example :code:`r'\\\\x07pps'` is transformed to :code:`r'\\\\\\\\apps'`. A file name is considered network shares if the file does not include a drive letter and they start with a double backslash (:code:`'\\\\\\\\'`) :param fname: File name :type fname: string :rtype: string """ if (platform.system().lower() != "windows") and (not _force): # pragma: no cover return fname # Replace unintended escape sequences that could be in # the file name, like "C:\appdata" rchars = { "\x07": r"\\a", "\x08": r"\\b", "\x0C": r"\\f", "\x0A": r"\\n", "\x0D": r"\\r", "\x09": r"\\t", "\x0B": r"\\v", } ret = "" for char in os.path.normpath(fname): ret = ret + rchars.get(char, char) # Remove superfluous double backslashes network_share = False tmp = None network_share = fname.startswith(r"\\") while tmp != ret: tmp, ret = ret, ret.replace(r"\\\\", r"\\") ret = ret.replace(r"\\\\", r"\\") # Put back network share if needed if network_share: ret = r"\\" + ret.lstrip(r"\\") return ret
python
def normalize_windows_fname(fname, _force=False): r""" Fix potential problems with a Microsoft Windows file name. Superfluous backslashes are removed and unintended escape sequences are converted to their equivalent (presumably correct and intended) representation, for example :code:`r'\\\\x07pps'` is transformed to :code:`r'\\\\\\\\apps'`. A file name is considered network shares if the file does not include a drive letter and they start with a double backslash (:code:`'\\\\\\\\'`) :param fname: File name :type fname: string :rtype: string """ if (platform.system().lower() != "windows") and (not _force): # pragma: no cover return fname # Replace unintended escape sequences that could be in # the file name, like "C:\appdata" rchars = { "\x07": r"\\a", "\x08": r"\\b", "\x0C": r"\\f", "\x0A": r"\\n", "\x0D": r"\\r", "\x09": r"\\t", "\x0B": r"\\v", } ret = "" for char in os.path.normpath(fname): ret = ret + rchars.get(char, char) # Remove superfluous double backslashes network_share = False tmp = None network_share = fname.startswith(r"\\") while tmp != ret: tmp, ret = ret, ret.replace(r"\\\\", r"\\") ret = ret.replace(r"\\\\", r"\\") # Put back network share if needed if network_share: ret = r"\\" + ret.lstrip(r"\\") return ret
[ "def", "normalize_windows_fname", "(", "fname", ",", "_force", "=", "False", ")", ":", "if", "(", "platform", ".", "system", "(", ")", ".", "lower", "(", ")", "!=", "\"windows\"", ")", "and", "(", "not", "_force", ")", ":", "# pragma: no cover", "return", "fname", "# Replace unintended escape sequences that could be in", "# the file name, like \"C:\\appdata\"", "rchars", "=", "{", "\"\\x07\"", ":", "r\"\\\\a\"", ",", "\"\\x08\"", ":", "r\"\\\\b\"", ",", "\"\\x0C\"", ":", "r\"\\\\f\"", ",", "\"\\x0A\"", ":", "r\"\\\\n\"", ",", "\"\\x0D\"", ":", "r\"\\\\r\"", ",", "\"\\x09\"", ":", "r\"\\\\t\"", ",", "\"\\x0B\"", ":", "r\"\\\\v\"", ",", "}", "ret", "=", "\"\"", "for", "char", "in", "os", ".", "path", ".", "normpath", "(", "fname", ")", ":", "ret", "=", "ret", "+", "rchars", ".", "get", "(", "char", ",", "char", ")", "# Remove superfluous double backslashes", "network_share", "=", "False", "tmp", "=", "None", "network_share", "=", "fname", ".", "startswith", "(", "r\"\\\\\"", ")", "while", "tmp", "!=", "ret", ":", "tmp", ",", "ret", "=", "ret", ",", "ret", ".", "replace", "(", "r\"\\\\\\\\\"", ",", "r\"\\\\\"", ")", "ret", "=", "ret", ".", "replace", "(", "r\"\\\\\\\\\"", ",", "r\"\\\\\"", ")", "# Put back network share if needed", "if", "network_share", ":", "ret", "=", "r\"\\\\\"", "+", "ret", ".", "lstrip", "(", "r\"\\\\\"", ")", "return", "ret" ]
r""" Fix potential problems with a Microsoft Windows file name. Superfluous backslashes are removed and unintended escape sequences are converted to their equivalent (presumably correct and intended) representation, for example :code:`r'\\\\x07pps'` is transformed to :code:`r'\\\\\\\\apps'`. A file name is considered network shares if the file does not include a drive letter and they start with a double backslash (:code:`'\\\\\\\\'`) :param fname: File name :type fname: string :rtype: string
[ "r", "Fix", "potential", "problems", "with", "a", "Microsoft", "Windows", "file", "name", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/file.py#L37-L79
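The problem normalize_windows_fname() guards against is easy to reproduce: in a non-raw Python literal, sequences like \a in a Windows path silently collapse into control characters, which the rchars table above maps back. A quick demonstration of the corruption itself.

fname = "C:\apps"        # intended Windows path written without a raw string
print(len(fname), repr(fname))    # 6 'C:\x07pps' -- "\a" became the BEL character
fname_ok = r"C:\apps"    # the raw string keeps the backslash intact
print(len(fname_ok), repr(fname_ok))  # 7 'C:\\apps'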
pmacosta/pmisc
pmisc/rst.py
_homogenize_linesep
def _homogenize_linesep(line): """Enforce line separators to be the right one depending on platform.""" token = str(uuid.uuid4()) line = line.replace(os.linesep, token).replace("\n", "").replace("\r", "") return line.replace(token, os.linesep)
python
def _homogenize_linesep(line): """Enforce line separators to be the right one depending on platform.""" token = str(uuid.uuid4()) line = line.replace(os.linesep, token).replace("\n", "").replace("\r", "") return line.replace(token, os.linesep)
[ "def", "_homogenize_linesep", "(", "line", ")", ":", "token", "=", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "line", "=", "line", ".", "replace", "(", "os", ".", "linesep", ",", "token", ")", ".", "replace", "(", "\"\\n\"", ",", "\"\"", ")", ".", "replace", "(", "\"\\r\"", ",", "\"\"", ")", "return", "line", ".", "replace", "(", "token", ",", "os", ".", "linesep", ")" ]
Enforce line separators to be the right one depending on platform.
[ "Enforce", "line", "separators", "to", "be", "the", "right", "one", "depending", "on", "platform", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/rst.py#L25-L29
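The token-swap trick in _homogenize_linesep() protects legitimate platform line separators while stripping stray "\n" and "\r"; the same three lines work standalone.

import os
import uuid

def homogenize(line: str) -> str:
    # Same approach as _homogenize_linesep(): hide real os.linesep behind a
    # unique token, drop stray "\n"/"\r", then restore the token.
    token = str(uuid.uuid4())
    line = line.replace(os.linesep, token).replace("\n", "").replace("\r", "")
    return line.replace(token, os.linesep)

print(repr(homogenize("spam\r\n")))  # 'spam\n' on POSIX; on Windows the '\r\n' pair survives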
pmacosta/pmisc
pmisc/rst.py
_proc_token
def _proc_token(spec, mlines): """Process line range tokens.""" spec = spec.strip().replace(" ", "") regexp = re.compile(r".*[^0123456789\-,]+.*") tokens = spec.split(",") cond = any([not item for item in tokens]) if ("--" in spec) or ("-," in spec) or (",-" in spec) or cond or regexp.match(spec): raise RuntimeError("Argument `lrange` is not valid") lines = [] for token in tokens: if token.count("-") > 1: raise RuntimeError("Argument `lrange` is not valid") if "-" in token: subtokens = token.split("-") lmin, lmax = ( int(subtokens[0]), int(subtokens[1]) if subtokens[1] else mlines, ) for num in range(lmin, lmax + 1): lines.append(num) else: lines.append(int(token)) if lines != sorted(lines): raise RuntimeError("Argument `lrange` is not valid") return lines
python
def _proc_token(spec, mlines): """Process line range tokens.""" spec = spec.strip().replace(" ", "") regexp = re.compile(r".*[^0123456789\-,]+.*") tokens = spec.split(",") cond = any([not item for item in tokens]) if ("--" in spec) or ("-," in spec) or (",-" in spec) or cond or regexp.match(spec): raise RuntimeError("Argument `lrange` is not valid") lines = [] for token in tokens: if token.count("-") > 1: raise RuntimeError("Argument `lrange` is not valid") if "-" in token: subtokens = token.split("-") lmin, lmax = ( int(subtokens[0]), int(subtokens[1]) if subtokens[1] else mlines, ) for num in range(lmin, lmax + 1): lines.append(num) else: lines.append(int(token)) if lines != sorted(lines): raise RuntimeError("Argument `lrange` is not valid") return lines
[ "def", "_proc_token", "(", "spec", ",", "mlines", ")", ":", "spec", "=", "spec", ".", "strip", "(", ")", ".", "replace", "(", "\" \"", ",", "\"\"", ")", "regexp", "=", "re", ".", "compile", "(", "r\".*[^0123456789\\-,]+.*\"", ")", "tokens", "=", "spec", ".", "split", "(", "\",\"", ")", "cond", "=", "any", "(", "[", "not", "item", "for", "item", "in", "tokens", "]", ")", "if", "(", "\"--\"", "in", "spec", ")", "or", "(", "\"-,\"", "in", "spec", ")", "or", "(", "\",-\"", "in", "spec", ")", "or", "cond", "or", "regexp", ".", "match", "(", "spec", ")", ":", "raise", "RuntimeError", "(", "\"Argument `lrange` is not valid\"", ")", "lines", "=", "[", "]", "for", "token", "in", "tokens", ":", "if", "token", ".", "count", "(", "\"-\"", ")", ">", "1", ":", "raise", "RuntimeError", "(", "\"Argument `lrange` is not valid\"", ")", "if", "\"-\"", "in", "token", ":", "subtokens", "=", "token", ".", "split", "(", "\"-\"", ")", "lmin", ",", "lmax", "=", "(", "int", "(", "subtokens", "[", "0", "]", ")", ",", "int", "(", "subtokens", "[", "1", "]", ")", "if", "subtokens", "[", "1", "]", "else", "mlines", ",", ")", "for", "num", "in", "range", "(", "lmin", ",", "lmax", "+", "1", ")", ":", "lines", ".", "append", "(", "num", ")", "else", ":", "lines", ".", "append", "(", "int", "(", "token", ")", ")", "if", "lines", "!=", "sorted", "(", "lines", ")", ":", "raise", "RuntimeError", "(", "\"Argument `lrange` is not valid\"", ")", "return", "lines" ]
Process line range tokens.
[ "Process", "line", "range", "tokens", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/rst.py#L32-L56
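What a line-range spec expands to under _proc_token()'s rules, sketched with a tiny parser for already-valid specs (the validation and sort check are omitted here).

def expand(spec: str, mlines: int):
    # "8-" means "8 through the last line", mirroring _proc_token().
    lines = []
    for token in spec.replace(" ", "").split(","):
        if "-" in token:
            lo, hi = token.split("-")
            lines.extend(range(int(lo), (int(hi) if hi else mlines) + 1))
        else:
            lines.append(int(token))
    return lines

print(expand("1,3,5-7", 10))  # [1, 3, 5, 6, 7]
print(expand("8-", 10))       # [8, 9, 10]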
pmacosta/pmisc
pmisc/rst.py
incfile
def incfile(fname, fpointer, lrange=None, sdir=None): r""" Return a Python source file formatted in reStructuredText. .. role:: bash(code) :language: bash :param fname: File name, relative to environment variable :bash:`PKG_DOC_DIR` :type fname: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but other functions can be used for debugging :type fpointer: function object :param lrange: Line range to include, similar to Sphinx `literalinclude <http://www.sphinx-doc.org/en/master/usage /restructuredtext/directives.html #directive-literalinclude>`_ directive :type lrange: string :param sdir: Source file directory. If None the :bash:`PKG_DOC_DIR` environment variable is used if it is defined, otherwise the directory where the module is located is used :type sdir: string For example: .. code-block:: python def func(): \"\"\" This is a docstring. This file shows how to use it: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('func_example.py', cog.out) .. =]= .. code-block:: python # func_example.py if __name__ == '__main__': func() .. =[=end=]= \"\"\" return 'This is func output' """ # pylint: disable=R0914 # Read file file_dir = ( sdir if sdir else os.environ.get("PKG_DOC_DIR", os.path.abspath(os.path.dirname(__file__))) ) fname = os.path.join(file_dir, fname) with open(fname, "r") as fobj: lines = fobj.readlines() # Eliminate spurious carriage returns in Microsoft Windows lines = [_homogenize_linesep(line) for line in lines] # Parse line specification inc_lines = ( _proc_token(lrange, len(lines)) if lrange else list(range(1, len(lines) + 1)) ) # Produce output fpointer(".. code-block:: python" + os.linesep) fpointer(os.linesep) for num, line in enumerate(lines): if num + 1 in inc_lines: fpointer( " " + line.replace("\t", " ").rstrip() + os.linesep if line.strip() else os.linesep ) fpointer(os.linesep)
python
def incfile(fname, fpointer, lrange=None, sdir=None): r""" Return a Python source file formatted in reStructuredText. .. role:: bash(code) :language: bash :param fname: File name, relative to environment variable :bash:`PKG_DOC_DIR` :type fname: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but other functions can be used for debugging :type fpointer: function object :param lrange: Line range to include, similar to Sphinx `literalinclude <http://www.sphinx-doc.org/en/master/usage /restructuredtext/directives.html #directive-literalinclude>`_ directive :type lrange: string :param sdir: Source file directory. If None the :bash:`PKG_DOC_DIR` environment variable is used if it is defined, otherwise the directory where the module is located is used :type sdir: string For example: .. code-block:: python def func(): \"\"\" This is a docstring. This file shows how to use it: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('func_example.py', cog.out) .. =]= .. code-block:: python # func_example.py if __name__ == '__main__': func() .. =[=end=]= \"\"\" return 'This is func output' """ # pylint: disable=R0914 # Read file file_dir = ( sdir if sdir else os.environ.get("PKG_DOC_DIR", os.path.abspath(os.path.dirname(__file__))) ) fname = os.path.join(file_dir, fname) with open(fname, "r") as fobj: lines = fobj.readlines() # Eliminate spurious carriage returns in Microsoft Windows lines = [_homogenize_linesep(line) for line in lines] # Parse line specification inc_lines = ( _proc_token(lrange, len(lines)) if lrange else list(range(1, len(lines) + 1)) ) # Produce output fpointer(".. code-block:: python" + os.linesep) fpointer(os.linesep) for num, line in enumerate(lines): if num + 1 in inc_lines: fpointer( " " + line.replace("\t", " ").rstrip() + os.linesep if line.strip() else os.linesep ) fpointer(os.linesep)
[ "def", "incfile", "(", "fname", ",", "fpointer", ",", "lrange", "=", "None", ",", "sdir", "=", "None", ")", ":", "# pylint: disable=R0914", "# Read file", "file_dir", "=", "(", "sdir", "if", "sdir", "else", "os", ".", "environ", ".", "get", "(", "\"PKG_DOC_DIR\"", ",", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ")", ")", ")", "fname", "=", "os", ".", "path", ".", "join", "(", "file_dir", ",", "fname", ")", "with", "open", "(", "fname", ",", "\"r\"", ")", "as", "fobj", ":", "lines", "=", "fobj", ".", "readlines", "(", ")", "# Eliminate spurious carriage returns in Microsoft Windows", "lines", "=", "[", "_homogenize_linesep", "(", "line", ")", "for", "line", "in", "lines", "]", "# Parse line specification", "inc_lines", "=", "(", "_proc_token", "(", "lrange", ",", "len", "(", "lines", ")", ")", "if", "lrange", "else", "list", "(", "range", "(", "1", ",", "len", "(", "lines", ")", "+", "1", ")", ")", ")", "# Produce output", "fpointer", "(", "\".. code-block:: python\"", "+", "os", ".", "linesep", ")", "fpointer", "(", "os", ".", "linesep", ")", "for", "num", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "if", "num", "+", "1", "in", "inc_lines", ":", "fpointer", "(", "\" \"", "+", "line", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", ".", "rstrip", "(", ")", "+", "os", ".", "linesep", "if", "line", ".", "strip", "(", ")", "else", "os", ".", "linesep", ")", "fpointer", "(", "os", ".", "linesep", ")" ]
r""" Return a Python source file formatted in reStructuredText. .. role:: bash(code) :language: bash :param fname: File name, relative to environment variable :bash:`PKG_DOC_DIR` :type fname: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but other functions can be used for debugging :type fpointer: function object :param lrange: Line range to include, similar to Sphinx `literalinclude <http://www.sphinx-doc.org/en/master/usage /restructuredtext/directives.html #directive-literalinclude>`_ directive :type lrange: string :param sdir: Source file directory. If None the :bash:`PKG_DOC_DIR` environment variable is used if it is defined, otherwise the directory where the module is located is used :type sdir: string For example: .. code-block:: python def func(): \"\"\" This is a docstring. This file shows how to use it: .. =[=cog .. import docs.support.incfile .. docs.support.incfile.incfile('func_example.py', cog.out) .. =]= .. code-block:: python # func_example.py if __name__ == '__main__': func() .. =[=end=]= \"\"\" return 'This is func output'
[ "r", "Return", "a", "Python", "source", "file", "formatted", "in", "reStructuredText", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/rst.py#L59-L133
pmacosta/pmisc
pmisc/rst.py
ste
def ste(command, nindent, mdir, fpointer, env=None): """ Print STDOUT of a shell command formatted in reStructuredText. This is a simplified version of :py:func:`pmisc.term_echo`. :param command: Shell command (relative to **mdir** if **env** is not given) :type command: string :param nindent: Indentation level :type nindent: integer :param mdir: Module directory, used if **env** is not given :type mdir: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param env: Environment dictionary. If not provided, the environment dictionary is the key "PKG_BIN_DIR" with the value of the **mdir** :type env: dictionary For example:: .. This is a reStructuredText file snippet .. [[[cog .. import os, sys .. from docs.support.term_echo import term_echo .. file_name = sys.modules['docs.support.term_echo'].__file__ .. mdir = os.path.realpath( .. os.path.dirname( .. os.path.dirname(os.path.dirname(file_name)) .. ) .. ) .. [[[cog ste('build_docs.py -h', 0, mdir, cog.out) ]]] .. code-block:: console $ ${PKG_BIN_DIR}/build_docs.py -h usage: build_docs.py [-h] [-d DIRECTORY] [-n NUM_CPUS] ... $ .. ]]] """ sdir = LDELIM + "PKG_BIN_DIR" + RDELIM command = ( sdir + ("{sep}{cmd}".format(sep=os.path.sep, cmd=command)) if env is None else command ) env = {"PKG_BIN_DIR": mdir} if env is None else env term_echo(command, nindent, env, fpointer)
python
def ste(command, nindent, mdir, fpointer, env=None): """ Print STDOUT of a shell command formatted in reStructuredText. This is a simplified version of :py:func:`pmisc.term_echo`. :param command: Shell command (relative to **mdir** if **env** is not given) :type command: string :param nindent: Indentation level :type nindent: integer :param mdir: Module directory, used if **env** is not given :type mdir: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param env: Environment dictionary. If not provided, the environment dictionary is the key "PKG_BIN_DIR" with the value of the **mdir** :type env: dictionary For example:: .. This is a reStructuredText file snippet .. [[[cog .. import os, sys .. from docs.support.term_echo import term_echo .. file_name = sys.modules['docs.support.term_echo'].__file__ .. mdir = os.path.realpath( .. os.path.dirname( .. os.path.dirname(os.path.dirname(file_name)) .. ) .. ) .. [[[cog ste('build_docs.py -h', 0, mdir, cog.out) ]]] .. code-block:: console $ ${PKG_BIN_DIR}/build_docs.py -h usage: build_docs.py [-h] [-d DIRECTORY] [-n NUM_CPUS] ... $ .. ]]] """ sdir = LDELIM + "PKG_BIN_DIR" + RDELIM command = ( sdir + ("{sep}{cmd}".format(sep=os.path.sep, cmd=command)) if env is None else command ) env = {"PKG_BIN_DIR": mdir} if env is None else env term_echo(command, nindent, env, fpointer)
[ "def", "ste", "(", "command", ",", "nindent", ",", "mdir", ",", "fpointer", ",", "env", "=", "None", ")", ":", "sdir", "=", "LDELIM", "+", "\"PKG_BIN_DIR\"", "+", "RDELIM", "command", "=", "(", "sdir", "+", "(", "\"{sep}{cmd}\"", ".", "format", "(", "sep", "=", "os", ".", "path", ".", "sep", ",", "cmd", "=", "command", ")", ")", "if", "env", "is", "None", "else", "command", ")", "env", "=", "{", "\"PKG_BIN_DIR\"", ":", "mdir", "}", "if", "env", "is", "None", "else", "env", "term_echo", "(", "command", ",", "nindent", ",", "env", ",", "fpointer", ")" ]
Print STDOUT of a shell command formatted in reStructuredText. This is a simplified version of :py:func:`pmisc.term_echo`. :param command: Shell command (relative to **mdir** if **env** is not given) :type command: string :param nindent: Indentation level :type nindent: integer :param mdir: Module directory, used if **env** is not given :type mdir: string :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param env: Environment dictionary. If not provided, the environment dictionary is the key "PKG_BIN_DIR" with the value of the **mdir** :type env: dictionary For example:: .. This is a reStructuredText file snippet .. [[[cog .. import os, sys .. from docs.support.term_echo import term_echo .. file_name = sys.modules['docs.support.term_echo'].__file__ .. mdir = os.path.realpath( .. os.path.dirname( .. os.path.dirname(os.path.dirname(file_name)) .. ) .. ) .. [[[cog ste('build_docs.py -h', 0, mdir, cog.out) ]]] .. code-block:: console $ ${PKG_BIN_DIR}/build_docs.py -h usage: build_docs.py [-h] [-d DIRECTORY] [-n NUM_CPUS] ... $ .. ]]]
[ "Print", "STDOUT", "of", "a", "shell", "command", "formatted", "in", "reStructuredText", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/rst.py#L136-L192
pmacosta/pmisc
pmisc/rst.py
term_echo
def term_echo(command, nindent=0, env=None, fpointer=None, cols=60): """ Print STDOUT of a shell command formatted in reStructuredText. .. role:: bash(code) :language: bash :param command: Shell command :type command: string :param nindent: Indentation level :type nindent: integer :param env: Environment variable replacement dictionary. The command is pre-processed and any environment variable represented in the full notation (:bash:`${...}` in Linux and OS X or :bash:`%...%` in Windows) is replaced. The dictionary key is the environment variable name and the dictionary value is the replacement value. For example, if **command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env** is :code:`{'PYTHON_CMD':'python3'}` the actual command issued is :code:`'python3 -m "x=5"'` :type env: dictionary :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param cols: Number of columns of output :type cols: integer """ # pylint: disable=R0204 # Set argparse width so that output does not need horizontal scroll # bar in narrow windows or displays os.environ["COLUMNS"] = str(cols) command_int = command if env: for var, repl in env.items(): command_int = command_int.replace('"' + LDELIM + var + RDELIM + '"', repl) command_int = command_int.replace(LDELIM + var + RDELIM, repl) tokens = command_int.split(" ") # Add Python interpreter executable for Python scripts on Windows since # the shebang does not work if (platform.system().lower() == "windows") and ( tokens[0].endswith(".py") ): # pragma: no cover tokens = [sys.executable] + tokens proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdout = proc.communicate()[0] if sys.hexversion >= 0x03000000: # pragma: no cover stdout = stdout.decode("utf-8") stdout = stdout.split("\n") indent = nindent * " " fpointer(os.linesep) fpointer("{0}.. code-block:: console{1}".format(indent, os.linesep)) fpointer(os.linesep) fpointer("{0} $ {1}{2}".format(indent, command, os.linesep)) for line in stdout: line = _homogenize_linesep(line) if line.strip(): fpointer(indent + " " + line.replace("\t", " ") + os.linesep) else: fpointer(os.linesep)
python
def term_echo(command, nindent=0, env=None, fpointer=None, cols=60): """ Print STDOUT of a shell command formatted in reStructuredText. .. role:: bash(code) :language: bash :param command: Shell command :type command: string :param nindent: Indentation level :type nindent: integer :param env: Environment variable replacement dictionary. The command is pre-processed and any environment variable represented in the full notation (:bash:`${...}` in Linux and OS X or :bash:`%...%` in Windows) is replaced. The dictionary key is the environment variable name and the dictionary value is the replacement value. For example, if **command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env** is :code:`{'PYTHON_CMD':'python3'}` the actual command issued is :code:`'python3 -m "x=5"'` :type env: dictionary :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param cols: Number of columns of output :type cols: integer """ # pylint: disable=R0204 # Set argparse width so that output does not need horizontal scroll # bar in narrow windows or displays os.environ["COLUMNS"] = str(cols) command_int = command if env: for var, repl in env.items(): command_int = command_int.replace('"' + LDELIM + var + RDELIM + '"', repl) command_int = command_int.replace(LDELIM + var + RDELIM, repl) tokens = command_int.split(" ") # Add Python interpreter executable for Python scripts on Windows since # the shebang does not work if (platform.system().lower() == "windows") and ( tokens[0].endswith(".py") ): # pragma: no cover tokens = [sys.executable] + tokens proc = subprocess.Popen(tokens, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) stdout = proc.communicate()[0] if sys.hexversion >= 0x03000000: # pragma: no cover stdout = stdout.decode("utf-8") stdout = stdout.split("\n") indent = nindent * " " fpointer(os.linesep) fpointer("{0}.. code-block:: console{1}".format(indent, os.linesep)) fpointer(os.linesep) fpointer("{0} $ {1}{2}".format(indent, command, os.linesep)) for line in stdout: line = _homogenize_linesep(line) if line.strip(): fpointer(indent + " " + line.replace("\t", " ") + os.linesep) else: fpointer(os.linesep)
[ "def", "term_echo", "(", "command", ",", "nindent", "=", "0", ",", "env", "=", "None", ",", "fpointer", "=", "None", ",", "cols", "=", "60", ")", ":", "# pylint: disable=R0204", "# Set argparse width so that output does not need horizontal scroll", "# bar in narrow windows or displays", "os", ".", "environ", "[", "\"COLUMNS\"", "]", "=", "str", "(", "cols", ")", "command_int", "=", "command", "if", "env", ":", "for", "var", ",", "repl", "in", "env", ".", "items", "(", ")", ":", "command_int", "=", "command_int", ".", "replace", "(", "'\"'", "+", "LDELIM", "+", "var", "+", "RDELIM", "+", "'\"'", ",", "repl", ")", "command_int", "=", "command_int", ".", "replace", "(", "LDELIM", "+", "var", "+", "RDELIM", ",", "repl", ")", "tokens", "=", "command_int", ".", "split", "(", "\" \"", ")", "# Add Python interpreter executable for Python scripts on Windows since", "# the shebang does not work", "if", "(", "platform", ".", "system", "(", ")", ".", "lower", "(", ")", "==", "\"windows\"", ")", "and", "(", "tokens", "[", "0", "]", ".", "endswith", "(", "\".py\"", ")", ")", ":", "# pragma: no cover", "tokens", "=", "[", "sys", ".", "executable", "]", "+", "tokens", "proc", "=", "subprocess", ".", "Popen", "(", "tokens", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "STDOUT", ")", "stdout", "=", "proc", ".", "communicate", "(", ")", "[", "0", "]", "if", "sys", ".", "hexversion", ">=", "0x03000000", ":", "# pragma: no cover", "stdout", "=", "stdout", ".", "decode", "(", "\"utf-8\"", ")", "stdout", "=", "stdout", ".", "split", "(", "\"\\n\"", ")", "indent", "=", "nindent", "*", "\" \"", "fpointer", "(", "os", ".", "linesep", ")", "fpointer", "(", "\"{0}.. code-block:: console{1}\"", ".", "format", "(", "indent", ",", "os", ".", "linesep", ")", ")", "fpointer", "(", "os", ".", "linesep", ")", "fpointer", "(", "\"{0} $ {1}{2}\"", ".", "format", "(", "indent", ",", "command", ",", "os", ".", "linesep", ")", ")", "for", "line", "in", "stdout", ":", "line", "=", "_homogenize_linesep", "(", "line", ")", "if", "line", ".", "strip", "(", ")", ":", "fpointer", "(", "indent", "+", "\" \"", "+", "line", ".", "replace", "(", "\"\\t\"", ",", "\" \"", ")", "+", "os", ".", "linesep", ")", "else", ":", "fpointer", "(", "os", ".", "linesep", ")" ]
Print STDOUT of a shell command formatted in reStructuredText. .. role:: bash(code) :language: bash :param command: Shell command :type command: string :param nindent: Indentation level :type nindent: integer :param env: Environment variable replacement dictionary. The command is pre-processed and any environment variable represented in the full notation (:bash:`${...}` in Linux and OS X or :bash:`%...%` in Windows) is replaced. The dictionary key is the environment variable name and the dictionary value is the replacement value. For example, if **command** is :code:`'${PYTHON_CMD} -m "x=5"'` and **env** is :code:`{'PYTHON_CMD':'python3'}` the actual command issued is :code:`'python3 -m "x=5"'` :type env: dictionary :param fpointer: Output function pointer. Normally is :code:`cog.out` but :code:`print` or other functions can be used for debugging :type fpointer: function object :param cols: Number of columns of output :type cols: integer
[ "Print", "STDOUT", "of", "a", "shell", "command", "formatted", "in", "reStructuredText", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/rst.py#L195-L258
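A minimal sketch of calling the term_echo function above with the environment-replacement dictionary its docstring describes; the command and the replacement value are assumptions, and print again replaces cog.out.

from pmisc.rst import term_echo

# on Linux/OS X the full ${...} notation is replaced using the env dictionary,
# so the command actually executed is: python3 --version
term_echo('${PYTHON_CMD} --version',
          nindent=4,
          env={'PYTHON_CMD': 'python3'},
          fpointer=print)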
theno/utlz
fabfile.py
flo
def flo(string): '''Return the string given by param formatted with the callers locals.''' callers_locals = {} frame = inspect.currentframe() try: outerframe = frame.f_back callers_locals = outerframe.f_locals finally: del frame return string.format(**callers_locals)
python
def flo(string): '''Return the string given by param formatted with the callers locals.''' callers_locals = {} frame = inspect.currentframe() try: outerframe = frame.f_back callers_locals = outerframe.f_locals finally: del frame return string.format(**callers_locals)
[ "def", "flo", "(", "string", ")", ":", "callers_locals", "=", "{", "}", "frame", "=", "inspect", ".", "currentframe", "(", ")", "try", ":", "outerframe", "=", "frame", ".", "f_back", "callers_locals", "=", "outerframe", ".", "f_locals", "finally", ":", "del", "frame", "return", "string", ".", "format", "(", "*", "*", "callers_locals", ")" ]
Return the string given by param formatted with the caller's locals.
[ "Return", "the", "string", "given", "by", "param", "formatted", "with", "the", "callers", "locals", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L10-L19
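A minimal sketch of the caller-locals formatting done by the flo function above, assuming the utlz repository root (where fabfile.py lives) is importable; the greet function and its variable are invented for illustration.

from fabfile import flo

def greet():
    name = 'world'               # local variable picked up through the caller's frame
    return flo('hello, {name}')

print(greet())                   # -> hello, world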
theno/utlz
fabfile.py
_wrap_with
def _wrap_with(color_code): '''Color wrapper. Example: >>> blue = _wrap_with('34') >>> print(blue('text')) \033[34mtext\033[0m ''' def inner(text, bold=False): '''Inner color function.''' code = color_code if bold: code = flo("1;{code}") return flo('\033[{code}m{text}\033[0m') return inner
python
def _wrap_with(color_code): '''Color wrapper. Example: >>> blue = _wrap_with('34') >>> print(blue('text')) \033[34mtext\033[0m ''' def inner(text, bold=False): '''Inner color function.''' code = color_code if bold: code = flo("1;{code}") return flo('\033[{code}m{text}\033[0m') return inner
[ "def", "_wrap_with", "(", "color_code", ")", ":", "def", "inner", "(", "text", ",", "bold", "=", "False", ")", ":", "'''Inner color function.'''", "code", "=", "color_code", "if", "bold", ":", "code", "=", "flo", "(", "\"1;{code}\"", ")", "return", "flo", "(", "'\\033[{code}m{text}\\033[0m'", ")", "return", "inner" ]
Color wrapper. Example: >>> blue = _wrap_with('34') >>> print(blue('text')) \033[34mtext\033[0m
[ "Color", "wrapper", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L22-L36
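Building on the docstring's own blue example, a small sketch of the _wrap_with helper above, including the bold flag; the expected escape sequences are shown in the comments.

from fabfile import _wrap_with   # private helper, imported here only for illustration

blue = _wrap_with('34')          # '34' is the ANSI color code used in the docstring
print(blue('text'))              # \033[34mtext\033[0m
print(blue('text', bold=True))   # \033[1;34mtext\033[0m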
theno/utlz
fabfile.py
clean
def clean(deltox=False): '''Delete temporary files not under version control. Args: deltox: If True, delete virtual environments used by tox ''' basedir = dirname(__file__) print(cyan('delete temp files and dirs for packaging')) local(flo( 'rm -rf ' '{basedir}/.eggs/ ' '{basedir}/utlz.egg-info/ ' '{basedir}/dist ' '{basedir}/README ' '{basedir}/build/ ' )) print(cyan('\ndelete temp files and dirs for editing')) local(flo( 'rm -rf ' '{basedir}/.cache ' '{basedir}/.ropeproject ' )) print(cyan('\ndelete bytecode compiled versions of the python src')) # cf. http://stackoverflow.com/a/30659970 local(flo('find {basedir}/utlz {basedir}/tests ') + '\( -name \*pyc -o -name \*.pyo -o -name __pycache__ ' '-o -name \*.so -o -name \*.o -o -name \*.c \) ' '-prune ' '-exec rm -rf {} +') if deltox: print(cyan('\ndelete tox virual environments')) local(flo('cd {basedir} && rm -rf .tox/'))
python
def clean(deltox=False): '''Delete temporary files not under version control. Args: deltox: If True, delete virtual environments used by tox ''' basedir = dirname(__file__) print(cyan('delete temp files and dirs for packaging')) local(flo( 'rm -rf ' '{basedir}/.eggs/ ' '{basedir}/utlz.egg-info/ ' '{basedir}/dist ' '{basedir}/README ' '{basedir}/build/ ' )) print(cyan('\ndelete temp files and dirs for editing')) local(flo( 'rm -rf ' '{basedir}/.cache ' '{basedir}/.ropeproject ' )) print(cyan('\ndelete bytecode compiled versions of the python src')) # cf. http://stackoverflow.com/a/30659970 local(flo('find {basedir}/utlz {basedir}/tests ') + '\( -name \*pyc -o -name \*.pyo -o -name __pycache__ ' '-o -name \*.so -o -name \*.o -o -name \*.c \) ' '-prune ' '-exec rm -rf {} +') if deltox: print(cyan('\ndelete tox virual environments')) local(flo('cd {basedir} && rm -rf .tox/'))
[ "def", "clean", "(", "deltox", "=", "False", ")", ":", "basedir", "=", "dirname", "(", "__file__", ")", "print", "(", "cyan", "(", "'delete temp files and dirs for packaging'", ")", ")", "local", "(", "flo", "(", "'rm -rf '", "'{basedir}/.eggs/ '", "'{basedir}/utlz.egg-info/ '", "'{basedir}/dist '", "'{basedir}/README '", "'{basedir}/build/ '", ")", ")", "print", "(", "cyan", "(", "'\\ndelete temp files and dirs for editing'", ")", ")", "local", "(", "flo", "(", "'rm -rf '", "'{basedir}/.cache '", "'{basedir}/.ropeproject '", ")", ")", "print", "(", "cyan", "(", "'\\ndelete bytecode compiled versions of the python src'", ")", ")", "# cf. http://stackoverflow.com/a/30659970", "local", "(", "flo", "(", "'find {basedir}/utlz {basedir}/tests '", ")", "+", "'\\( -name \\*pyc -o -name \\*.pyo -o -name __pycache__ '", "'-o -name \\*.so -o -name \\*.o -o -name \\*.c \\) '", "'-prune '", "'-exec rm -rf {} +'", ")", "if", "deltox", ":", "print", "(", "cyan", "(", "'\\ndelete tox virual environments'", ")", ")", "local", "(", "flo", "(", "'cd {basedir} && rm -rf .tox/'", ")", ")" ]
Delete temporary files not under version control. Args: deltox: If True, delete virtual environments used by tox
[ "Delete", "temporary", "files", "not", "under", "version", "control", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L76-L112
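The clean function above is a Fabric task; a hedged sketch of running it programmatically, mirroring the execute(clean) call made by the pypi task later in this section. This assumes Fabric 1.x, which the local/execute style suggests.

from fabric.api import execute   # assumption: Fabric 1.x
from fabfile import clean

execute(clean, deltox=True)      # also remove the tox virtual environments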
theno/utlz
fabfile.py
pythons
def pythons(): '''Install latest pythons with pyenv. The python version will be activated in the projects base dir. Will skip already installed latest python versions. ''' if not _pyenv_exists(): print('\npyenv is not installed. You can install it with fabsetup ' '(https://github.com/theno/fabsetup):\n\n ' + cyan('mkdir ~/repos && cd ~/repos\n ' 'git clone https://github.com/theno/fabsetup.git\n ' 'cd fabsetup && fab setup.pyenv -H localhost')) return 1 latest_pythons = _determine_latest_pythons() print(cyan('\n## install latest python versions')) for version in latest_pythons: local(flo('pyenv install --skip-existing {version}')) print(cyan('\n## activate pythons')) basedir = dirname(__file__) latest_pythons_str = ' '.join(latest_pythons) local(flo('cd {basedir} && pyenv local system {latest_pythons_str}')) highest_python = latest_pythons[-1] print(cyan(flo( '\n## prepare Python-{highest_python} for testing and packaging'))) packages_for_testing = 'pytest tox' packages_for_packaging = 'pypandoc twine' local(flo('~/.pyenv/versions/{highest_python}/bin/pip install --upgrade ' 'pip {packages_for_testing} {packages_for_packaging}'))
python
def pythons(): '''Install latest pythons with pyenv. The python version will be activated in the projects base dir. Will skip already installed latest python versions. ''' if not _pyenv_exists(): print('\npyenv is not installed. You can install it with fabsetup ' '(https://github.com/theno/fabsetup):\n\n ' + cyan('mkdir ~/repos && cd ~/repos\n ' 'git clone https://github.com/theno/fabsetup.git\n ' 'cd fabsetup && fab setup.pyenv -H localhost')) return 1 latest_pythons = _determine_latest_pythons() print(cyan('\n## install latest python versions')) for version in latest_pythons: local(flo('pyenv install --skip-existing {version}')) print(cyan('\n## activate pythons')) basedir = dirname(__file__) latest_pythons_str = ' '.join(latest_pythons) local(flo('cd {basedir} && pyenv local system {latest_pythons_str}')) highest_python = latest_pythons[-1] print(cyan(flo( '\n## prepare Python-{highest_python} for testing and packaging'))) packages_for_testing = 'pytest tox' packages_for_packaging = 'pypandoc twine' local(flo('~/.pyenv/versions/{highest_python}/bin/pip install --upgrade ' 'pip {packages_for_testing} {packages_for_packaging}'))
[ "def", "pythons", "(", ")", ":", "if", "not", "_pyenv_exists", "(", ")", ":", "print", "(", "'\\npyenv is not installed. You can install it with fabsetup '", "'(https://github.com/theno/fabsetup):\\n\\n '", "+", "cyan", "(", "'mkdir ~/repos && cd ~/repos\\n '", "'git clone https://github.com/theno/fabsetup.git\\n '", "'cd fabsetup && fab setup.pyenv -H localhost'", ")", ")", "return", "1", "latest_pythons", "=", "_determine_latest_pythons", "(", ")", "print", "(", "cyan", "(", "'\\n## install latest python versions'", ")", ")", "for", "version", "in", "latest_pythons", ":", "local", "(", "flo", "(", "'pyenv install --skip-existing {version}'", ")", ")", "print", "(", "cyan", "(", "'\\n## activate pythons'", ")", ")", "basedir", "=", "dirname", "(", "__file__", ")", "latest_pythons_str", "=", "' '", ".", "join", "(", "latest_pythons", ")", "local", "(", "flo", "(", "'cd {basedir} && pyenv local system {latest_pythons_str}'", ")", ")", "highest_python", "=", "latest_pythons", "[", "-", "1", "]", "print", "(", "cyan", "(", "flo", "(", "'\\n## prepare Python-{highest_python} for testing and packaging'", ")", ")", ")", "packages_for_testing", "=", "'pytest tox'", "packages_for_packaging", "=", "'pypandoc twine'", "local", "(", "flo", "(", "'~/.pyenv/versions/{highest_python}/bin/pip install --upgrade '", "'pip {packages_for_testing} {packages_for_packaging}'", ")", ")" ]
Install latest pythons with pyenv. The python version will be activated in the project's base dir. Will skip already installed latest python versions.
[ "Install", "latest", "pythons", "with", "pyenv", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L135-L167
theno/utlz
fabfile.py
tox
def tox(args=''): '''Run tox. Build package and run unit tests against several pythons. Args: args: Optional arguments passed to tox. Example: fab tox:'-e py36 -r' ''' basedir = dirname(__file__) latest_pythons = _determine_latest_pythons() # e.g. highest_minor_python: '3.6' highest_minor_python = _highest_minor(latest_pythons) _local_needs_pythons(flo('cd {basedir} && ' 'python{highest_minor_python} -m tox {args}'))
python
def tox(args=''): '''Run tox. Build package and run unit tests against several pythons. Args: args: Optional arguments passed to tox. Example: fab tox:'-e py36 -r' ''' basedir = dirname(__file__) latest_pythons = _determine_latest_pythons() # e.g. highest_minor_python: '3.6' highest_minor_python = _highest_minor(latest_pythons) _local_needs_pythons(flo('cd {basedir} && ' 'python{highest_minor_python} -m tox {args}'))
[ "def", "tox", "(", "args", "=", "''", ")", ":", "basedir", "=", "dirname", "(", "__file__", ")", "latest_pythons", "=", "_determine_latest_pythons", "(", ")", "# e.g. highest_minor_python: '3.6'", "highest_minor_python", "=", "_highest_minor", "(", "latest_pythons", ")", "_local_needs_pythons", "(", "flo", "(", "'cd {basedir} && '", "'python{highest_minor_python} -m tox {args}'", ")", ")" ]
Run tox. Build package and run unit tests against several pythons. Args: args: Optional arguments passed to tox. Example: fab tox:'-e py36 -r'
[ "Run", "tox", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L182-L200
theno/utlz
fabfile.py
pypi
def pypi(): '''Build package and upload to pypi.''' if query_yes_no('version updated in setup.py?'): print(cyan('\n## clean-up\n')) execute(clean) basedir = dirname(__file__) latest_pythons = _determine_latest_pythons() # e.g. highest_minor: '3.6' highest_minor = _highest_minor(latest_pythons) python = flo('python{highest_minor}') print(cyan('\n## build package')) _local_needs_pythons(flo('cd {basedir} && {python} setup.py sdist')) print(cyan('\n## upload package')) local(flo('cd {basedir} && {python} -m twine upload dist/*'))
python
def pypi(): '''Build package and upload to pypi.''' if query_yes_no('version updated in setup.py?'): print(cyan('\n## clean-up\n')) execute(clean) basedir = dirname(__file__) latest_pythons = _determine_latest_pythons() # e.g. highest_minor: '3.6' highest_minor = _highest_minor(latest_pythons) python = flo('python{highest_minor}') print(cyan('\n## build package')) _local_needs_pythons(flo('cd {basedir} && {python} setup.py sdist')) print(cyan('\n## upload package')) local(flo('cd {basedir} && {python} -m twine upload dist/*'))
[ "def", "pypi", "(", ")", ":", "if", "query_yes_no", "(", "'version updated in setup.py?'", ")", ":", "print", "(", "cyan", "(", "'\\n## clean-up\\n'", ")", ")", "execute", "(", "clean", ")", "basedir", "=", "dirname", "(", "__file__", ")", "latest_pythons", "=", "_determine_latest_pythons", "(", ")", "# e.g. highest_minor: '3.6'", "highest_minor", "=", "_highest_minor", "(", "latest_pythons", ")", "python", "=", "flo", "(", "'python{highest_minor}'", ")", "print", "(", "cyan", "(", "'\\n## build package'", ")", ")", "_local_needs_pythons", "(", "flo", "(", "'cd {basedir} && {python} setup.py sdist'", ")", ")", "print", "(", "cyan", "(", "'\\n## upload package'", ")", ")", "local", "(", "flo", "(", "'cd {basedir} && {python} -m twine upload dist/*'", ")", ")" ]
Build package and upload to pypi.
[ "Build", "package", "and", "upload", "to", "pypi", "." ]
train
https://github.com/theno/utlz/blob/bf7d2b53f3e0d35c6f8ded81f3f774a74fcd3389/fabfile.py#L235-L253
greenelab/django-genes
genes/management/commands/genes_load_gene_history.py
chk_col_numbers
def chk_col_numbers(line_num, num_cols, tax_id_col, id_col, symbol_col): """ Check that none of the input column numbers is out of range. (Instead of defining this function, we could depend on Python's built-in IndexError exception for this issue, but the IndexError exception wouldn't include line number information, which is helpful for users to find exactly which line is the culprit.) """ bad_col = '' if tax_id_col >= num_cols: bad_col = 'tax_id_col' elif id_col >= num_cols: bad_col = 'discontinued_id_col' elif symbol_col >= num_cols: bad_col = 'discontinued_symbol_col' if bad_col: raise Exception( 'Input file line #%d: column number of %s is out of range' % (line_num, bad_col))
python
def chk_col_numbers(line_num, num_cols, tax_id_col, id_col, symbol_col): """ Check that none of the input column numbers is out of range. (Instead of defining this function, we could depend on Python's built-in IndexError exception for this issue, but the IndexError exception wouldn't include line number information, which is helpful for users to find exactly which line is the culprit.) """ bad_col = '' if tax_id_col >= num_cols: bad_col = 'tax_id_col' elif id_col >= num_cols: bad_col = 'discontinued_id_col' elif symbol_col >= num_cols: bad_col = 'discontinued_symbol_col' if bad_col: raise Exception( 'Input file line #%d: column number of %s is out of range' % (line_num, bad_col))
[ "def", "chk_col_numbers", "(", "line_num", ",", "num_cols", ",", "tax_id_col", ",", "id_col", ",", "symbol_col", ")", ":", "bad_col", "=", "''", "if", "tax_id_col", ">=", "num_cols", ":", "bad_col", "=", "'tax_id_col'", "elif", "id_col", ">=", "num_cols", ":", "bad_col", "=", "'discontinued_id_col'", "elif", "symbol_col", ">=", "num_cols", ":", "bad_col", "=", "'discontinued_symbol_col'", "if", "bad_col", ":", "raise", "Exception", "(", "'Input file line #%d: column number of %s is out of range'", "%", "(", "line_num", ",", "bad_col", ")", ")" ]
Check that none of the input column numbers is out of range. (Instead of defining this function, we could depend on Python's built-in IndexError exception for this issue, but the IndexError exception wouldn't include line number information, which is helpful for users to find exactly which line is the culprit.)
[ "Check", "that", "none", "of", "the", "input", "column", "numbers", "is", "out", "of", "range", ".", "(", "Instead", "of", "defining", "this", "function", "we", "could", "depend", "on", "Python", "s", "built", "-", "in", "IndexError", "exception", "for", "this", "issue", "but", "the", "IndexError", "exception", "wouldn", "t", "include", "line", "number", "information", "which", "is", "helpful", "for", "users", "to", "find", "exactly", "which", "line", "is", "the", "culprit", ".", ")" ]
train
https://github.com/greenelab/django-genes/blob/298939adcb115031acfc11cfcef60ea0b596fae5/genes/management/commands/genes_load_gene_history.py#L104-L124
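A small illustration of the range check performed by chk_col_numbers above; the numbers are invented, and importing the command module will generally require a configured Django project because the file also pulls in model classes.

from genes.management.commands.genes_load_gene_history import chk_col_numbers

# a 3-column line where the requested discontinued-id column is out of range
chk_col_numbers(line_num=7, num_cols=3, tax_id_col=0, id_col=5, symbol_col=2)
# raises: Exception: Input file line #7: column number of discontinued_id_col is out of range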
greenelab/django-genes
genes/management/commands/genes_load_gene_history.py
import_gene_history
def import_gene_history(file_handle, tax_id, tax_id_col, id_col, symbol_col): """ Read input gene history file into the database. Note that the arguments tax_id_col, id_col and symbol_col have been converted into 0-based column indexes. """ # Make sure that tax_id is not "" or " " if not tax_id or tax_id.isspace(): raise Exception("Input tax_id is blank") # Make sure that tax_id exists in Organism table in the database. try: organism = Organism.objects.get(taxonomy_id=tax_id) except Organism.DoesNotExist: raise Exception('Input tax_id %s does NOT exist in Organism table. ' 'Please add it into Organism table first.' % tax_id) if tax_id_col < 0 or id_col < 0 or symbol_col < 0: raise Exception( 'tax_id_col, id_col and symbol_col must be positive integers') for line_index, line in enumerate(file_handle): if line.startswith('#'): # Skip comment lines. continue fields = line.rstrip().split('\t') # Check input column numbers. chk_col_numbers(line_index + 1, len(fields), tax_id_col, id_col, symbol_col) # Skip lines whose tax_id's do not match input tax_id. if tax_id != fields[tax_id_col]: continue entrez_id = fields[id_col] # If the gene already exists in database, set its "obsolete" attribute # to True; otherwise create a new obsolete Gene record in database. try: gene = Gene.objects.get(entrezid=entrez_id) if not gene.obsolete: gene.obsolete = True gene.save() except Gene.DoesNotExist: Gene.objects.create(entrezid=entrez_id, organism=organism, systematic_name=fields[symbol_col], obsolete=True)
python
def import_gene_history(file_handle, tax_id, tax_id_col, id_col, symbol_col): """ Read input gene history file into the database. Note that the arguments tax_id_col, id_col and symbol_col have been converted into 0-based column indexes. """ # Make sure that tax_id is not "" or " " if not tax_id or tax_id.isspace(): raise Exception("Input tax_id is blank") # Make sure that tax_id exists in Organism table in the database. try: organism = Organism.objects.get(taxonomy_id=tax_id) except Organism.DoesNotExist: raise Exception('Input tax_id %s does NOT exist in Organism table. ' 'Please add it into Organism table first.' % tax_id) if tax_id_col < 0 or id_col < 0 or symbol_col < 0: raise Exception( 'tax_id_col, id_col and symbol_col must be positive integers') for line_index, line in enumerate(file_handle): if line.startswith('#'): # Skip comment lines. continue fields = line.rstrip().split('\t') # Check input column numbers. chk_col_numbers(line_index + 1, len(fields), tax_id_col, id_col, symbol_col) # Skip lines whose tax_id's do not match input tax_id. if tax_id != fields[tax_id_col]: continue entrez_id = fields[id_col] # If the gene already exists in database, set its "obsolete" attribute # to True; otherwise create a new obsolete Gene record in database. try: gene = Gene.objects.get(entrezid=entrez_id) if not gene.obsolete: gene.obsolete = True gene.save() except Gene.DoesNotExist: Gene.objects.create(entrezid=entrez_id, organism=organism, systematic_name=fields[symbol_col], obsolete=True)
[ "def", "import_gene_history", "(", "file_handle", ",", "tax_id", ",", "tax_id_col", ",", "id_col", ",", "symbol_col", ")", ":", "# Make sure that tax_id is not \"\" or \" \"", "if", "not", "tax_id", "or", "tax_id", ".", "isspace", "(", ")", ":", "raise", "Exception", "(", "\"Input tax_id is blank\"", ")", "# Make sure that tax_id exists in Organism table in the database.", "try", ":", "organism", "=", "Organism", ".", "objects", ".", "get", "(", "taxonomy_id", "=", "tax_id", ")", "except", "Organism", ".", "DoesNotExist", ":", "raise", "Exception", "(", "'Input tax_id %s does NOT exist in Organism table. '", "'Please add it into Organism table first.'", "%", "tax_id", ")", "if", "tax_id_col", "<", "0", "or", "id_col", "<", "0", "or", "symbol_col", "<", "0", ":", "raise", "Exception", "(", "'tax_id_col, id_col and symbol_col must be positive integers'", ")", "for", "line_index", ",", "line", "in", "enumerate", "(", "file_handle", ")", ":", "if", "line", ".", "startswith", "(", "'#'", ")", ":", "# Skip comment lines.", "continue", "fields", "=", "line", ".", "rstrip", "(", ")", ".", "split", "(", "'\\t'", ")", "# Check input column numbers.", "chk_col_numbers", "(", "line_index", "+", "1", ",", "len", "(", "fields", ")", ",", "tax_id_col", ",", "id_col", ",", "symbol_col", ")", "# Skip lines whose tax_id's do not match input tax_id.", "if", "tax_id", "!=", "fields", "[", "tax_id_col", "]", ":", "continue", "entrez_id", "=", "fields", "[", "id_col", "]", "# If the gene already exists in database, set its \"obsolete\" attribute", "# to True; otherwise create a new obsolete Gene record in database.", "try", ":", "gene", "=", "Gene", ".", "objects", ".", "get", "(", "entrezid", "=", "entrez_id", ")", "if", "not", "gene", ".", "obsolete", ":", "gene", ".", "obsolete", "=", "True", "gene", ".", "save", "(", ")", "except", "Gene", ".", "DoesNotExist", ":", "Gene", ".", "objects", ".", "create", "(", "entrezid", "=", "entrez_id", ",", "organism", "=", "organism", ",", "systematic_name", "=", "fields", "[", "symbol_col", "]", ",", "obsolete", "=", "True", ")" ]
Read input gene history file into the database. Note that the arguments tax_id_col, id_col and symbol_col have been converted into 0-based column indexes.
[ "Read", "input", "gene", "history", "file", "into", "the", "database", ".", "Note", "that", "the", "arguments", "tax_id_col", "id_col", "and", "symbol_col", "have", "been", "converted", "into", "0", "-", "based", "column", "indexes", "." ]
train
https://github.com/greenelab/django-genes/blob/298939adcb115031acfc11cfcef60ea0b596fae5/genes/management/commands/genes_load_gene_history.py#L127-L173
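A hedged sketch of feeding a gene history file to the import_gene_history function above; the file name and column positions are assumptions, the column arguments are 0-based as the docstring notes, and an Organism row for the given tax_id must already exist in the database.

from genes.management.commands.genes_load_gene_history import import_gene_history

# hypothetical file and column layout; '9606' (human) must be present in the Organism table
with open('gene_history') as fh:
    import_gene_history(fh, tax_id='9606',
                        tax_id_col=0, id_col=2, symbol_col=3)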
alixnovosi/botskeleton
botskeleton/outputs/output_mastodon.py
MastodonSkeleton.cred_init
def cred_init( self, *, secrets_dir: str, log: Logger, bot_name: str="", ) -> None: """Initialize what requires credentials/secret files.""" super().__init__(secrets_dir=secrets_dir, log=log, bot_name=bot_name) self.ldebug("Retrieving ACCESS_TOKEN ...") with open(path.join(self.secrets_dir, "ACCESS_TOKEN")) as f: ACCESS_TOKEN = f.read().strip() # Instance base url optional. self.ldebug("Looking for INSTANCE_BASE_URL ...") instance_base_url_path = path.join(self.secrets_dir, "INSTANCE_BASE_URL") if path.isfile(instance_base_url_path): with open(instance_base_url_path) as f: self.instance_base_url = f.read().strip() else: self.ldebug("Couldn't find INSTANCE_BASE_URL, defaulting to mastodon.social.") self.instance_base_url = "https://mastodon.social" self.api = mastodon.Mastodon(access_token=ACCESS_TOKEN, api_base_url=self.instance_base_url) self.html_re = re.compile("<.*?>")
python
def cred_init( self, *, secrets_dir: str, log: Logger, bot_name: str="", ) -> None: """Initialize what requires credentials/secret files.""" super().__init__(secrets_dir=secrets_dir, log=log, bot_name=bot_name) self.ldebug("Retrieving ACCESS_TOKEN ...") with open(path.join(self.secrets_dir, "ACCESS_TOKEN")) as f: ACCESS_TOKEN = f.read().strip() # Instance base url optional. self.ldebug("Looking for INSTANCE_BASE_URL ...") instance_base_url_path = path.join(self.secrets_dir, "INSTANCE_BASE_URL") if path.isfile(instance_base_url_path): with open(instance_base_url_path) as f: self.instance_base_url = f.read().strip() else: self.ldebug("Couldn't find INSTANCE_BASE_URL, defaulting to mastodon.social.") self.instance_base_url = "https://mastodon.social" self.api = mastodon.Mastodon(access_token=ACCESS_TOKEN, api_base_url=self.instance_base_url) self.html_re = re.compile("<.*?>")
[ "def", "cred_init", "(", "self", ",", "*", ",", "secrets_dir", ":", "str", ",", "log", ":", "Logger", ",", "bot_name", ":", "str", "=", "\"\"", ",", ")", "->", "None", ":", "super", "(", ")", ".", "__init__", "(", "secrets_dir", "=", "secrets_dir", ",", "log", "=", "log", ",", "bot_name", "=", "bot_name", ")", "self", ".", "ldebug", "(", "\"Retrieving ACCESS_TOKEN ...\"", ")", "with", "open", "(", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"ACCESS_TOKEN\"", ")", ")", "as", "f", ":", "ACCESS_TOKEN", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "# Instance base url optional.", "self", ".", "ldebug", "(", "\"Looking for INSTANCE_BASE_URL ...\"", ")", "instance_base_url_path", "=", "path", ".", "join", "(", "self", ".", "secrets_dir", ",", "\"INSTANCE_BASE_URL\"", ")", "if", "path", ".", "isfile", "(", "instance_base_url_path", ")", ":", "with", "open", "(", "instance_base_url_path", ")", "as", "f", ":", "self", ".", "instance_base_url", "=", "f", ".", "read", "(", ")", ".", "strip", "(", ")", "else", ":", "self", ".", "ldebug", "(", "\"Couldn't find INSTANCE_BASE_URL, defaulting to mastodon.social.\"", ")", "self", ".", "instance_base_url", "=", "\"https://mastodon.social\"", "self", ".", "api", "=", "mastodon", ".", "Mastodon", "(", "access_token", "=", "ACCESS_TOKEN", ",", "api_base_url", "=", "self", ".", "instance_base_url", ")", "self", ".", "html_re", "=", "re", ".", "compile", "(", "\"<.*?>\"", ")" ]
Initialize what requires credentials/secret files.
[ "Initialize", "what", "requires", "credentials", "/", "secret", "files", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_mastodon.py#L18-L44
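A sketch of the secrets directory that cred_init above reads; the directory name is arbitrary, but the two file names are the ones hard-coded in the method, and only ACCESS_TOKEN is required.

secrets_dir/
    ACCESS_TOKEN          required: the access token string (whitespace is stripped)
    INSTANCE_BASE_URL     optional: instance URL; defaults to https://mastodon.social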
alixnovosi/botskeleton
botskeleton/outputs/output_mastodon.py
MastodonSkeleton.send
def send( self, *, text: str, ) -> List[OutputRecord]: """ Send mastodon message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error. """ try: status = self.api.status_post(status=text) return [TootRecord(record_data={ "toot_id": status["id"], "text": text })] except mastodon.MastodonError as e: return [self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {text} without media:\n{e}\n"), e)]
python
def send( self, *, text: str, ) -> List[OutputRecord]: """ Send mastodon message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error. """ try: status = self.api.status_post(status=text) return [TootRecord(record_data={ "toot_id": status["id"], "text": text })] except mastodon.MastodonError as e: return [self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {text} without media:\n{e}\n"), e)]
[ "def", "send", "(", "self", ",", "*", ",", "text", ":", "str", ",", ")", "->", "List", "[", "OutputRecord", "]", ":", "try", ":", "status", "=", "self", ".", "api", ".", "status_post", "(", "status", "=", "text", ")", "return", "[", "TootRecord", "(", "record_data", "=", "{", "\"toot_id\"", ":", "status", "[", "\"id\"", "]", ",", "\"text\"", ":", "text", "}", ")", "]", "except", "mastodon", ".", "MastodonError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"sending post {text} without media:\\n{e}\\n\"", ")", ",", "e", ")", "]" ]
Send mastodon message. :param text: text to send in post. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Send", "mastodon", "message", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_mastodon.py#L46-L70
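A short hedged sketch of the send method above; bot stands for an already-initialised MastodonSkeleton instance (its construction is outside this record), and the post text is a placeholder.

records = bot.send(text='hello from the bot skeleton')
record = records[0]   # a TootRecord on success, or an error record built by handle_error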
alixnovosi/botskeleton
botskeleton/outputs/output_mastodon.py
MastodonSkeleton.send_with_media
def send_with_media( self, *, text: str, files: List[str], captions: List[str]=[], ) -> List[OutputRecord]: """ Upload media to mastodon, and send status and media, and captions if present. :param text: post text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error. """ try: self.ldebug(f"Uploading files {files}.") if captions is None: captions = [] if len(files) > len(captions): captions.extend([self.default_caption_message] * (len(files) - len(captions))) media_dicts = [] for i, file in enumerate(files): caption = captions[i] media_dicts.append(self.api.media_post(file, description=caption)) self.ldebug(f"Media ids {media_dicts}") except mastodon.MastodonError as e: return [self.handle_error( f"Bot {self.bot_name} encountered an error when uploading {files}:\n{e}\n", e )] try: status = self.api.status_post(status=text, media_ids=media_dicts) self.ldebug(f"Status object from toot: {status}.") return [TootRecord(record_data={ "toot_id": status["id"], "text": text, "media_ids": media_dicts, "captions": captions })] except mastodon.MastodonError as e: return [self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {text} with media dicts {media_dicts}:" f"\n{e}\n"), e)]
python
def send_with_media( self, *, text: str, files: List[str], captions: List[str]=[], ) -> List[OutputRecord]: """ Upload media to mastodon, and send status and media, and captions if present. :param text: post text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error. """ try: self.ldebug(f"Uploading files {files}.") if captions is None: captions = [] if len(files) > len(captions): captions.extend([self.default_caption_message] * (len(files) - len(captions))) media_dicts = [] for i, file in enumerate(files): caption = captions[i] media_dicts.append(self.api.media_post(file, description=caption)) self.ldebug(f"Media ids {media_dicts}") except mastodon.MastodonError as e: return [self.handle_error( f"Bot {self.bot_name} encountered an error when uploading {files}:\n{e}\n", e )] try: status = self.api.status_post(status=text, media_ids=media_dicts) self.ldebug(f"Status object from toot: {status}.") return [TootRecord(record_data={ "toot_id": status["id"], "text": text, "media_ids": media_dicts, "captions": captions })] except mastodon.MastodonError as e: return [self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {text} with media dicts {media_dicts}:" f"\n{e}\n"), e)]
[ "def", "send_with_media", "(", "self", ",", "*", ",", "text", ":", "str", ",", "files", ":", "List", "[", "str", "]", ",", "captions", ":", "List", "[", "str", "]", "=", "[", "]", ",", ")", "->", "List", "[", "OutputRecord", "]", ":", "try", ":", "self", ".", "ldebug", "(", "f\"Uploading files {files}.\"", ")", "if", "captions", "is", "None", ":", "captions", "=", "[", "]", "if", "len", "(", "files", ")", ">", "len", "(", "captions", ")", ":", "captions", ".", "extend", "(", "[", "self", ".", "default_caption_message", "]", "*", "(", "len", "(", "files", ")", "-", "len", "(", "captions", ")", ")", ")", "media_dicts", "=", "[", "]", "for", "i", ",", "file", "in", "enumerate", "(", "files", ")", ":", "caption", "=", "captions", "[", "i", "]", "media_dicts", ".", "append", "(", "self", ".", "api", ".", "media_post", "(", "file", ",", "description", "=", "caption", ")", ")", "self", ".", "ldebug", "(", "f\"Media ids {media_dicts}\"", ")", "except", "mastodon", ".", "MastodonError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "f\"Bot {self.bot_name} encountered an error when uploading {files}:\\n{e}\\n\"", ",", "e", ")", "]", "try", ":", "status", "=", "self", ".", "api", ".", "status_post", "(", "status", "=", "text", ",", "media_ids", "=", "media_dicts", ")", "self", ".", "ldebug", "(", "f\"Status object from toot: {status}.\"", ")", "return", "[", "TootRecord", "(", "record_data", "=", "{", "\"toot_id\"", ":", "status", "[", "\"id\"", "]", ",", "\"text\"", ":", "text", ",", "\"media_ids\"", ":", "media_dicts", ",", "\"captions\"", ":", "captions", "}", ")", "]", "except", "mastodon", ".", "MastodonError", "as", "e", ":", "return", "[", "self", ".", "handle_error", "(", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"sending post {text} with media dicts {media_dicts}:\"", "f\"\\n{e}\\n\"", ")", ",", "e", ")", "]" ]
Upload media to mastodon, and send status and media, and captions if present. :param text: post text. :param files: list of files to upload with post. :param captions: list of captions to include as alt-text with files. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Upload", "media", "to", "mastodon", "and", "send", "status", "and", "media", "and", "captions", "if", "present", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_mastodon.py#L72-L125
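A hedged sketch of send_with_media above; bot is again an assumed, already-initialised instance, and the file names and caption are placeholders. When fewer captions than files are given, the remaining files receive the default caption message, as the code shows.

records = bot.send_with_media(
    text='daily post',
    files=['drawing.png', 'detail.png'],
    captions=['the full drawing'])   # the second file falls back to the default caption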
alixnovosi/botskeleton
botskeleton/outputs/output_mastodon.py
MastodonSkeleton.perform_batch_reply
def perform_batch_reply( self, *, callback: Callable[..., str], lookback_limit: int, target_handle: str, ) -> List[OutputRecord]: """ Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error. """ self.log.info(f"Attempting to batch reply to mastodon user {target_handle}") # target handle should be able to be provided either as @user or @user@domain # note that this produces an empty first chunk handle_chunks = target_handle.split("@") target_base_handle = handle_chunks[1] records: List[OutputRecord] = [] our_id = self.api.account_verify_credentials()["id"] # be careful here - we're using a search to do this, # and if we're not careful we'll pull up people just mentioning the target. possible_accounts = self.api.account_search(target_handle, following=True) their_id = None for account in possible_accounts: if account["username"] == target_base_handle: their_id = account["id"] break if their_id is None: return [self.handle_error(f"Could not find target handle {target_handle}!", None)] statuses = self.api.account_statuses(their_id, limit=lookback_limit) for status in statuses: status_id = status.id # find possible replies we've made. our_statuses = self.api.account_statuses(our_id, since_id=status_id) in_reply_to_ids = list(map(lambda x: x.in_reply_to_id, our_statuses)) if status_id not in in_reply_to_ids: encoded_status_text = re.sub(self.html_re, "", status.content) status_text = html.unescape(encoded_status_text) message = callback(message_id=status_id, message=status_text, extra_keys={}) self.log.info(f"Replying {message} to status {status_id} from {target_handle}.") try: new_status = self.api.status_post(status=message, in_reply_to_id=status_id) records.append(TootRecord(record_data={ "toot_id": new_status.id, "in_reply_to": target_handle, "in_reply_to_id": status_id, "text": message, })) except mastodon.MastodonError as e: records.append( self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {message} during a batch reply " f":\n{e}\n"), e)) else: self.log.info(f"Not replying to status {status_id} from {target_handle} " f"- we already replied.") return records
python
def perform_batch_reply( self, *, callback: Callable[..., str], lookback_limit: int, target_handle: str, ) -> List[OutputRecord]: """ Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error. """ self.log.info(f"Attempting to batch reply to mastodon user {target_handle}") # target handle should be able to be provided either as @user or @user@domain # note that this produces an empty first chunk handle_chunks = target_handle.split("@") target_base_handle = handle_chunks[1] records: List[OutputRecord] = [] our_id = self.api.account_verify_credentials()["id"] # be careful here - we're using a search to do this, # and if we're not careful we'll pull up people just mentioning the target. possible_accounts = self.api.account_search(target_handle, following=True) their_id = None for account in possible_accounts: if account["username"] == target_base_handle: their_id = account["id"] break if their_id is None: return [self.handle_error(f"Could not find target handle {target_handle}!", None)] statuses = self.api.account_statuses(their_id, limit=lookback_limit) for status in statuses: status_id = status.id # find possible replies we've made. our_statuses = self.api.account_statuses(our_id, since_id=status_id) in_reply_to_ids = list(map(lambda x: x.in_reply_to_id, our_statuses)) if status_id not in in_reply_to_ids: encoded_status_text = re.sub(self.html_re, "", status.content) status_text = html.unescape(encoded_status_text) message = callback(message_id=status_id, message=status_text, extra_keys={}) self.log.info(f"Replying {message} to status {status_id} from {target_handle}.") try: new_status = self.api.status_post(status=message, in_reply_to_id=status_id) records.append(TootRecord(record_data={ "toot_id": new_status.id, "in_reply_to": target_handle, "in_reply_to_id": status_id, "text": message, })) except mastodon.MastodonError as e: records.append( self.handle_error((f"Bot {self.bot_name} encountered an error when " f"sending post {message} during a batch reply " f":\n{e}\n"), e)) else: self.log.info(f"Not replying to status {status_id} from {target_handle} " f"- we already replied.") return records
[ "def", "perform_batch_reply", "(", "self", ",", "*", ",", "callback", ":", "Callable", "[", "...", ",", "str", "]", ",", "lookback_limit", ":", "int", ",", "target_handle", ":", "str", ",", ")", "->", "List", "[", "OutputRecord", "]", ":", "self", ".", "log", ".", "info", "(", "f\"Attempting to batch reply to mastodon user {target_handle}\"", ")", "# target handle should be able to be provided either as @user or @user@domain", "# note that this produces an empty first chunk", "handle_chunks", "=", "target_handle", ".", "split", "(", "\"@\"", ")", "target_base_handle", "=", "handle_chunks", "[", "1", "]", "records", ":", "List", "[", "OutputRecord", "]", "=", "[", "]", "our_id", "=", "self", ".", "api", ".", "account_verify_credentials", "(", ")", "[", "\"id\"", "]", "# be careful here - we're using a search to do this,", "# and if we're not careful we'll pull up people just mentioning the target.", "possible_accounts", "=", "self", ".", "api", ".", "account_search", "(", "target_handle", ",", "following", "=", "True", ")", "their_id", "=", "None", "for", "account", "in", "possible_accounts", ":", "if", "account", "[", "\"username\"", "]", "==", "target_base_handle", ":", "their_id", "=", "account", "[", "\"id\"", "]", "break", "if", "their_id", "is", "None", ":", "return", "[", "self", ".", "handle_error", "(", "f\"Could not find target handle {target_handle}!\"", ",", "None", ")", "]", "statuses", "=", "self", ".", "api", ".", "account_statuses", "(", "their_id", ",", "limit", "=", "lookback_limit", ")", "for", "status", "in", "statuses", ":", "status_id", "=", "status", ".", "id", "# find possible replies we've made.", "our_statuses", "=", "self", ".", "api", ".", "account_statuses", "(", "our_id", ",", "since_id", "=", "status_id", ")", "in_reply_to_ids", "=", "list", "(", "map", "(", "lambda", "x", ":", "x", ".", "in_reply_to_id", ",", "our_statuses", ")", ")", "if", "status_id", "not", "in", "in_reply_to_ids", ":", "encoded_status_text", "=", "re", ".", "sub", "(", "self", ".", "html_re", ",", "\"\"", ",", "status", ".", "content", ")", "status_text", "=", "html", ".", "unescape", "(", "encoded_status_text", ")", "message", "=", "callback", "(", "message_id", "=", "status_id", ",", "message", "=", "status_text", ",", "extra_keys", "=", "{", "}", ")", "self", ".", "log", ".", "info", "(", "f\"Replying {message} to status {status_id} from {target_handle}.\"", ")", "try", ":", "new_status", "=", "self", ".", "api", ".", "status_post", "(", "status", "=", "message", ",", "in_reply_to_id", "=", "status_id", ")", "records", ".", "append", "(", "TootRecord", "(", "record_data", "=", "{", "\"toot_id\"", ":", "new_status", ".", "id", ",", "\"in_reply_to\"", ":", "target_handle", ",", "\"in_reply_to_id\"", ":", "status_id", ",", "\"text\"", ":", "message", ",", "}", ")", ")", "except", "mastodon", ".", "MastodonError", "as", "e", ":", "records", ".", "append", "(", "self", ".", "handle_error", "(", "(", "f\"Bot {self.bot_name} encountered an error when \"", "f\"sending post {message} during a batch reply \"", "f\":\\n{e}\\n\"", ")", ",", "e", ")", ")", "else", ":", "self", ".", "log", ".", "info", "(", "f\"Not replying to status {status_id} from {target_handle} \"", "f\"- we already replied.\"", ")", "return", "records" ]
Performs batch reply on target account. Looks up the recent messages of the target user, applies the callback, and replies with what the callback generates. :param callback: a callback taking a message id, message contents, and optional extra keys, and returning a message string. :param target: the id of the target account. :param lookback_limit: a lookback limit of how many messages to consider. :returns: list of output records, each corresponding to either a single post, or an error.
[ "Performs", "batch", "reply", "on", "target", "account", ".", "Looks", "up", "the", "recent", "messages", "of", "the", "target", "user", "applies", "the", "callback", "and", "replies", "with", "what", "the", "callback", "generates", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_mastodon.py#L127-L208
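A hedged sketch of perform_batch_reply above; the callback signature mirrors the keyword arguments the method passes (message_id, message, extra_keys), while bot, the target handle, and the reply text are assumptions.

def reply_with_echo(*, message_id, message, extra_keys):
    # build the reply from the cleaned-up status text the method extracts
    return 'you said: ' + message

records = bot.perform_batch_reply(
    callback=reply_with_echo,
    lookback_limit=10,
    target_handle='@friend@mastodon.social')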
alixnovosi/botskeleton
botskeleton/outputs/output_mastodon.py
MastodonSkeleton.handle_error
def handle_error(self, message: str, e: mastodon.MastodonError) -> OutputRecord: """Handle error while trying to do something.""" self.lerror(f"Got an error! {e}") # Handle errors if we know how. try: code = e[0]["code"] if code in self.handled_errors: self.handled_errors[code] else: pass except Exception: pass return TootRecord(error=e)
python
def handle_error(self, message: str, e: mastodon.MastodonError) -> OutputRecord: """Handle error while trying to do something.""" self.lerror(f"Got an error! {e}") # Handle errors if we know how. try: code = e[0]["code"] if code in self.handled_errors: self.handled_errors[code] else: pass except Exception: pass return TootRecord(error=e)
[ "def", "handle_error", "(", "self", ",", "message", ":", "str", ",", "e", ":", "mastodon", ".", "MastodonError", ")", "->", "OutputRecord", ":", "self", ".", "lerror", "(", "f\"Got an error! {e}\"", ")", "# Handle errors if we know how.", "try", ":", "code", "=", "e", "[", "0", "]", "[", "\"code\"", "]", "if", "code", "in", "self", ".", "handled_errors", ":", "self", ".", "handled_errors", "[", "code", "]", "else", ":", "pass", "except", "Exception", ":", "pass", "return", "TootRecord", "(", "error", "=", "e", ")" ]
Handle error while trying to do something.
[ "Handle", "error", "while", "trying", "to", "do", "something", "." ]
train
https://github.com/alixnovosi/botskeleton/blob/55bfc1b8a3623c10437e4ab2cd0b0ec8d35907a9/botskeleton/outputs/output_mastodon.py#L214-L229
dongying/dear
dear/spectrum/_base.py
SpectrogramFile._read_header
def _read_header(self): ''' Little-endian |... 4 bytes unsigned int ...|... 4 bytes unsigned int ...| | frames count | dimensions count | ''' self._fh.seek(0) buf = self._fh.read(4*2) fc, dc = struct.unpack("<II", buf) return fc, dc
python
def _read_header(self): ''' Little-endian |... 4 bytes unsigned int ...|... 4 bytes unsigned int ...| | frames count | dimensions count | ''' self._fh.seek(0) buf = self._fh.read(4*2) fc, dc = struct.unpack("<II", buf) return fc, dc
[ "def", "_read_header", "(", "self", ")", ":", "self", ".", "_fh", ".", "seek", "(", "0", ")", "buf", "=", "self", ".", "_fh", ".", "read", "(", "4", "*", "2", ")", "fc", ",", "dc", "=", "struct", ".", "unpack", "(", "\"<II\"", ",", "buf", ")", "return", "fc", ",", "dc" ]
Little-endian |... 4 bytes unsigned int ...|... 4 bytes unsigned int ...| | frames count | dimensions count |
[ "Little", "-", "endian", "|", "...", "4", "bytes", "unsigned", "int", "...", "|", "...", "4", "bytes", "unsigned", "int", "...", "|", "|", "frames", "count", "|", "dimensions", "count", "|" ]
train
https://github.com/dongying/dear/blob/6f9a4f63bf3ee197dc03d7d2bd0451a83906d2ba/dear/spectrum/_base.py#L33-L42
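A standalone sketch of the 8-byte header layout parsed by _read_header above; the frame and dimension counts are arbitrary example values.

import struct

header = struct.pack('<II', 128, 40)         # 128 frames, 40 dimensions, little-endian
frames, dims = struct.unpack('<II', header)  # what _read_header() recovers from the file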
limix/optimix
optimix/_types.py
Scalar.listen
def listen(self, you): """ Request a callback for value modification. Parameters ---------- you : object An instance having ``__call__`` attribute. """ self._listeners.append(you) self.raw.talk_to(you)
python
def listen(self, you): """ Request a callback for value modification. Parameters ---------- you : object An instance having ``__call__`` attribute. """ self._listeners.append(you) self.raw.talk_to(you)
[ "def", "listen", "(", "self", ",", "you", ")", ":", "self", ".", "_listeners", ".", "append", "(", "you", ")", "self", ".", "raw", ".", "talk_to", "(", "you", ")" ]
Request a callback for value modification. Parameters ---------- you : object An instance having ``__call__`` attribute.
[ "Request", "a", "callback", "for", "value", "modification", "." ]
train
https://github.com/limix/optimix/blob/d7b1356df259c9f6ee0d658258fb47d0074fc416/optimix/_types.py#L96-L106
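The docstring only requires the listener to be callable; a hedged sketch of a compatible listener (the import path, the Scalar(1.0) constructor call, and the exact moment the callback fires are assumptions, not shown in the snippet above):

from optimix import Scalar  # import path assumed

class PrintListener:
    # Any object with a __call__ attribute satisfies the stated requirement.
    def __call__(self):
        print("scalar value was modified")

scalar = Scalar(1.0)            # constructor signature assumed
scalar.listen(PrintListener())  # registered for future value modifications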
koenedaele/skosprovider_oe
skosprovider_oe/providers.py
OnroerendErfgoedProvider._get_term_by_id
def _get_term_by_id(self, id): '''Simple utility function to load a term. ''' url = (self.url + '/%s.json') % id r = self.session.get(url) return r.json()
python
def _get_term_by_id(self, id): '''Simple utility function to load a term. ''' url = (self.url + '/%s.json') % id r = self.session.get(url) return r.json()
[ "def", "_get_term_by_id", "(", "self", ",", "id", ")", ":", "url", "=", "(", "self", ".", "url", "+", "'/%s.json'", ")", "%", "id", "r", "=", "self", ".", "session", ".", "get", "(", "url", ")", "return", "r", ".", "json", "(", ")" ]
Simple utility function to load a term.
[ "Simple", "utility", "function", "to", "load", "a", "term", "." ]
train
https://github.com/koenedaele/skosprovider_oe/blob/099b23cccd3884b06354102955dbc71f59d8fdb0/skosprovider_oe/providers.py#L167-L172
koenedaele/skosprovider_oe
skosprovider_oe/providers.py
OnroerendErfgoedProvider.get_top_display
def get_top_display(self, **kwargs): ''' Returns all concepts or collections that form the top-level of a display hierarchy. As opposed to the :meth:`get_top_concepts`, this method can possibly return both concepts and collections. :rtype: Returns a list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present. ''' language = self._get_language(**kwargs) url = self.url + '/lijst.json' args = {'type[]': ['HR']} r = self.session.get(url, params=args) result = r.json() items = result top = self.get_by_id(items[0]['id']) res = [] def expand_coll(res, coll): for nid in coll.members: c = self.get_by_id(nid) res.append({ 'id': c.id, 'label': c.label(language) }) return res return expand_coll(res, top)
python
def get_top_display(self, **kwargs): ''' Returns all concepts or collections that form the top-level of a display hierarchy. As opposed to the :meth:`get_top_concepts`, this method can possibly return both concepts and collections. :rtype: Returns a list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present. ''' language = self._get_language(**kwargs) url = self.url + '/lijst.json' args = {'type[]': ['HR']} r = self.session.get(url, params=args) result = r.json() items = result top = self.get_by_id(items[0]['id']) res = [] def expand_coll(res, coll): for nid in coll.members: c = self.get_by_id(nid) res.append({ 'id': c.id, 'label': c.label(language) }) return res return expand_coll(res, top)
[ "def", "get_top_display", "(", "self", ",", "*", "*", "kwargs", ")", ":", "language", "=", "self", ".", "_get_language", "(", "*", "*", "kwargs", ")", "url", "=", "self", ".", "url", "+", "'/lijst.json'", "args", "=", "{", "'type[]'", ":", "[", "'HR'", "]", "}", "r", "=", "self", ".", "session", ".", "get", "(", "url", ",", "params", "=", "args", ")", "result", "=", "r", ".", "json", "(", ")", "items", "=", "result", "top", "=", "self", ".", "get_by_id", "(", "items", "[", "0", "]", "[", "'id'", "]", ")", "res", "=", "[", "]", "def", "expand_coll", "(", "res", ",", "coll", ")", ":", "for", "nid", "in", "coll", ".", "members", ":", "c", "=", "self", ".", "get_by_id", "(", "nid", ")", "res", ".", "append", "(", "{", "'id'", ":", "c", ".", "id", ",", "'label'", ":", "c", ".", "label", "(", "language", ")", "}", ")", "return", "res", "return", "expand_coll", "(", "res", ",", "top", ")" ]
Returns all concepts or collections that form the top-level of a display hierarchy. As opposed to the :meth:`get_top_concepts`, this method can possibly return both concepts and collections. :rtype: Returns a list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present.
[ "Returns", "all", "concepts", "or", "collections", "that", "form", "the", "top", "-", "level", "of", "a", "display", "hierarchy", "." ]
train
https://github.com/koenedaele/skosprovider_oe/blob/099b23cccd3884b06354102955dbc71f59d8fdb0/skosprovider_oe/providers.py#L246-L275
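A hedged usage sketch; the provider's constructor arguments and the language keyword are assumptions not shown in the snippet above:

provider = OnroerendErfgoedProvider({'id': 'TYPOLOGIE'})  # constructor arguments assumed
for entry in provider.get_top_display(language='nl'):
    # Each entry carries an id and a label in the requested language.
    print(entry['id'], entry['label'])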
koenedaele/skosprovider_oe
skosprovider_oe/providers.py
OnroerendErfgoedProvider.get_children_display
def get_children_display(self, id, **kwargs): ''' Return a list of concepts or collections that should be displayed under this concept or collection. :param id: A concept or collection id. :rtype: A list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present. If the id does not exist, return `False`. ''' language = self._get_language(**kwargs) item = self.get_by_id(id) res = [] if isinstance(item, Collection): for mid in item.members: m = self.get_by_id(mid) res.append({ 'id': m.id, 'label': m.label(language) }) else: for cid in item.narrower: c = self.get_by_id(cid) res.append({ 'id': c.id, 'label': c.label(language) }) return res
python
def get_children_display(self, id, **kwargs): ''' Return a list of concepts or collections that should be displayed under this concept or collection. :param id: A concept or collection id. :rtype: A list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present. If the id does not exist, return `False`. ''' language = self._get_language(**kwargs) item = self.get_by_id(id) res = [] if isinstance(item, Collection): for mid in item.members: m = self.get_by_id(mid) res.append({ 'id': m.id, 'label': m.label(language) }) else: for cid in item.narrower: c = self.get_by_id(cid) res.append({ 'id': c.id, 'label': c.label(language) }) return res
[ "def", "get_children_display", "(", "self", ",", "id", ",", "*", "*", "kwargs", ")", ":", "language", "=", "self", ".", "_get_language", "(", "*", "*", "kwargs", ")", "item", "=", "self", ".", "get_by_id", "(", "id", ")", "res", "=", "[", "]", "if", "isinstance", "(", "item", ",", "Collection", ")", ":", "for", "mid", "in", "item", ".", "members", ":", "m", "=", "self", ".", "get_by_id", "(", "mid", ")", "res", ".", "append", "(", "{", "'id'", ":", "m", ".", "id", ",", "'label'", ":", "m", ".", "label", "(", "language", ")", "}", ")", "else", ":", "for", "cid", "in", "item", ".", "narrower", ":", "c", "=", "self", ".", "get_by_id", "(", "cid", ")", "res", ".", "append", "(", "{", "'id'", ":", "c", ".", "id", ",", "'label'", ":", "c", ".", "label", "(", "language", ")", "}", ")", "return", "res" ]
Return a list of concepts or collections that should be displayed under this concept or collection. :param id: A concept or collection id. :rtype: A list of concepts and collections. For each an id is present and a label. The label is determined by looking at the `**kwargs` parameter, the default language of the provider and falls back to `en` if nothing is present. If the id does not exist, return `False`.
[ "Return", "a", "list", "of", "concepts", "or", "collections", "that", "should", "be", "displayed", "under", "this", "concept", "or", "collection", "." ]
train
https://github.com/koenedaele/skosprovider_oe/blob/099b23cccd3884b06354102955dbc71f59d8fdb0/skosprovider_oe/providers.py#L277-L306
greenelab/django-genes
genes/utils.py
translate_genes
def translate_genes(id_list=None, from_id=None, to_id=None, organism=None): """ Pass a list of identifiers (id_list), the name of the database ('Entrez', 'Symbol', 'Standard name', 'Systematic name' or a loaded crossreference database) that you wish to translate from, and the name of the database that you wish to translate to. """ ids = set(id_list) # Initialize set of identifiers not found by this translate_genes method. not_found = set() from_ids = None # Get the map of from_ids to the gene pks if organism is not None: gene_objects_manager = Gene.objects.filter( organism__scientific_name=organism) else: gene_objects_manager = Gene.objects if (from_id == 'Entrez'): int_list = [] for x in ids: try: int_list.append(int(x)) except(ValueError): not_found.add(x) ids = set(int_list) from_ids = gene_objects_manager.filter(entrezid__in=ids).values_list( 'entrezid', 'id') elif (from_id == 'Systematic name'): from_ids = gene_objects_manager.filter( systematic_name__in=ids).values_list('systematic_name', 'id') elif (from_id == 'Standard name'): from_ids = gene_objects_manager.filter( standard_name__in=ids).values_list('standard_name', 'id') elif (from_id == 'Symbol'): # If standard_name exists, symbol will be standard_name; otherwise # symbol will be systematic_name. from_ids = gene_objects_manager.annotate( symbol=Coalesce('standard_name', 'systematic_name')).filter( symbol__in=ids).values_list('symbol', 'id') else: # a crossreference db? xrdb = CrossRefDB.objects.get(name=from_id) from_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list( 'xrid', 'gene__id') # Dictionary that maps from type ID passed by user to gene__id. from_id_map = {} gene_ids = [] for item in from_ids: from_id_map[item[0]] = item[1] gene_ids.append(item[1]) # Now let's figure out what we need to go to: to_ids = None if (to_id == 'Entrez'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'entrezid') elif (to_id == 'Systematic name'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'systematic_name') elif (to_id == 'Standard name'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'standard_name') elif (to_id == 'Symbol'): # If standard_name exists, symbol will be standard_name; otherwise # symbol will be systematic_name. to_ids = Gene.objects.annotate( symbol=Coalesce('standard_name', 'systematic_name')).filter( id__in=gene_ids).values_list('id', 'symbol') else: # A crossreference db? xrdb = CrossRefDB.objects.get(name=to_id) to_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list( 'gene__id', 'xrid') to_id_map = {} for item in to_ids: if not item[0] in to_id_map: to_id_map[item[0]] = [item[1], ] else: to_id_map[item[0]].append(item[1]) from_to = {} for item in ids: try: gene_id = from_id_map[item] except KeyError: not_found.add(item) continue to_id = to_id_map[gene_id] from_to[item] = to_id from_to['not_found'] = list(not_found) return from_to
python
def translate_genes(id_list=None, from_id=None, to_id=None, organism=None): """ Pass a list of identifiers (id_list), the name of the database ('Entrez', 'Symbol', 'Standard name', 'Systematic name' or a loaded crossreference database) that you wish to translate from, and the name of the database that you wish to translate to. """ ids = set(id_list) # Initialize set of identifiers not found by this translate_genes method. not_found = set() from_ids = None # Get the map of from_ids to the gene pks if organism is not None: gene_objects_manager = Gene.objects.filter( organism__scientific_name=organism) else: gene_objects_manager = Gene.objects if (from_id == 'Entrez'): int_list = [] for x in ids: try: int_list.append(int(x)) except(ValueError): not_found.add(x) ids = set(int_list) from_ids = gene_objects_manager.filter(entrezid__in=ids).values_list( 'entrezid', 'id') elif (from_id == 'Systematic name'): from_ids = gene_objects_manager.filter( systematic_name__in=ids).values_list('systematic_name', 'id') elif (from_id == 'Standard name'): from_ids = gene_objects_manager.filter( standard_name__in=ids).values_list('standard_name', 'id') elif (from_id == 'Symbol'): # If standard_name exists, symbol will be standard_name; otherwise # symbol will be systematic_name. from_ids = gene_objects_manager.annotate( symbol=Coalesce('standard_name', 'systematic_name')).filter( symbol__in=ids).values_list('symbol', 'id') else: # a crossreference db? xrdb = CrossRefDB.objects.get(name=from_id) from_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list( 'xrid', 'gene__id') # Dictionary that maps from type ID passed by user to gene__id. from_id_map = {} gene_ids = [] for item in from_ids: from_id_map[item[0]] = item[1] gene_ids.append(item[1]) # Now let's figure out what we need to go to: to_ids = None if (to_id == 'Entrez'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'entrezid') elif (to_id == 'Systematic name'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'systematic_name') elif (to_id == 'Standard name'): to_ids = Gene.objects.filter(id__in=gene_ids).values_list( 'id', 'standard_name') elif (to_id == 'Symbol'): # If standard_name exists, symbol will be standard_name; otherwise # symbol will be systematic_name. to_ids = Gene.objects.annotate( symbol=Coalesce('standard_name', 'systematic_name')).filter( id__in=gene_ids).values_list('id', 'symbol') else: # A crossreference db? xrdb = CrossRefDB.objects.get(name=to_id) to_ids = CrossRef.objects.filter(crossrefdb=xrdb).values_list( 'gene__id', 'xrid') to_id_map = {} for item in to_ids: if not item[0] in to_id_map: to_id_map[item[0]] = [item[1], ] else: to_id_map[item[0]].append(item[1]) from_to = {} for item in ids: try: gene_id = from_id_map[item] except KeyError: not_found.add(item) continue to_id = to_id_map[gene_id] from_to[item] = to_id from_to['not_found'] = list(not_found) return from_to
[ "def", "translate_genes", "(", "id_list", "=", "None", ",", "from_id", "=", "None", ",", "to_id", "=", "None", ",", "organism", "=", "None", ")", ":", "ids", "=", "set", "(", "id_list", ")", "# Initialize set of identifiers not found by this translate_genes method.", "not_found", "=", "set", "(", ")", "from_ids", "=", "None", "# Get the map of from_ids to the gene pks", "if", "organism", "is", "not", "None", ":", "gene_objects_manager", "=", "Gene", ".", "objects", ".", "filter", "(", "organism__scientific_name", "=", "organism", ")", "else", ":", "gene_objects_manager", "=", "Gene", ".", "objects", "if", "(", "from_id", "==", "'Entrez'", ")", ":", "int_list", "=", "[", "]", "for", "x", "in", "ids", ":", "try", ":", "int_list", ".", "append", "(", "int", "(", "x", ")", ")", "except", "(", "ValueError", ")", ":", "not_found", ".", "add", "(", "x", ")", "ids", "=", "set", "(", "int_list", ")", "from_ids", "=", "gene_objects_manager", ".", "filter", "(", "entrezid__in", "=", "ids", ")", ".", "values_list", "(", "'entrezid'", ",", "'id'", ")", "elif", "(", "from_id", "==", "'Systematic name'", ")", ":", "from_ids", "=", "gene_objects_manager", ".", "filter", "(", "systematic_name__in", "=", "ids", ")", ".", "values_list", "(", "'systematic_name'", ",", "'id'", ")", "elif", "(", "from_id", "==", "'Standard name'", ")", ":", "from_ids", "=", "gene_objects_manager", ".", "filter", "(", "standard_name__in", "=", "ids", ")", ".", "values_list", "(", "'standard_name'", ",", "'id'", ")", "elif", "(", "from_id", "==", "'Symbol'", ")", ":", "# If standard_name exists, symbol will be standard_name; otherwise", "# symbol will be systematic_name.", "from_ids", "=", "gene_objects_manager", ".", "annotate", "(", "symbol", "=", "Coalesce", "(", "'standard_name'", ",", "'systematic_name'", ")", ")", ".", "filter", "(", "symbol__in", "=", "ids", ")", ".", "values_list", "(", "'symbol'", ",", "'id'", ")", "else", ":", "# a crossreference db?", "xrdb", "=", "CrossRefDB", ".", "objects", ".", "get", "(", "name", "=", "from_id", ")", "from_ids", "=", "CrossRef", ".", "objects", ".", "filter", "(", "crossrefdb", "=", "xrdb", ")", ".", "values_list", "(", "'xrid'", ",", "'gene__id'", ")", "# Dictionary that maps from type ID passed by user to gene__id.", "from_id_map", "=", "{", "}", "gene_ids", "=", "[", "]", "for", "item", "in", "from_ids", ":", "from_id_map", "[", "item", "[", "0", "]", "]", "=", "item", "[", "1", "]", "gene_ids", ".", "append", "(", "item", "[", "1", "]", ")", "# Now let's figure out what we need to go to:", "to_ids", "=", "None", "if", "(", "to_id", "==", "'Entrez'", ")", ":", "to_ids", "=", "Gene", ".", "objects", ".", "filter", "(", "id__in", "=", "gene_ids", ")", ".", "values_list", "(", "'id'", ",", "'entrezid'", ")", "elif", "(", "to_id", "==", "'Systematic name'", ")", ":", "to_ids", "=", "Gene", ".", "objects", ".", "filter", "(", "id__in", "=", "gene_ids", ")", ".", "values_list", "(", "'id'", ",", "'systematic_name'", ")", "elif", "(", "to_id", "==", "'Standard name'", ")", ":", "to_ids", "=", "Gene", ".", "objects", ".", "filter", "(", "id__in", "=", "gene_ids", ")", ".", "values_list", "(", "'id'", ",", "'standard_name'", ")", "elif", "(", "to_id", "==", "'Symbol'", ")", ":", "# If standard_name exists, symbol will be standard_name; otherwise", "# symbol will be systematic_name.", "to_ids", "=", "Gene", ".", "objects", ".", "annotate", "(", "symbol", "=", "Coalesce", "(", "'standard_name'", ",", "'systematic_name'", ")", ")", ".", "filter", "(", "id__in", "=", "gene_ids", ")", ".", 
"values_list", "(", "'id'", ",", "'symbol'", ")", "else", ":", "# A crossreference db?", "xrdb", "=", "CrossRefDB", ".", "objects", ".", "get", "(", "name", "=", "to_id", ")", "to_ids", "=", "CrossRef", ".", "objects", ".", "filter", "(", "crossrefdb", "=", "xrdb", ")", ".", "values_list", "(", "'gene__id'", ",", "'xrid'", ")", "to_id_map", "=", "{", "}", "for", "item", "in", "to_ids", ":", "if", "not", "item", "[", "0", "]", "in", "to_id_map", ":", "to_id_map", "[", "item", "[", "0", "]", "]", "=", "[", "item", "[", "1", "]", ",", "]", "else", ":", "to_id_map", "[", "item", "[", "0", "]", "]", ".", "append", "(", "item", "[", "1", "]", ")", "from_to", "=", "{", "}", "for", "item", "in", "ids", ":", "try", ":", "gene_id", "=", "from_id_map", "[", "item", "]", "except", "KeyError", ":", "not_found", ".", "add", "(", "item", ")", "continue", "to_id", "=", "to_id_map", "[", "gene_id", "]", "from_to", "[", "item", "]", "=", "to_id", "from_to", "[", "'not_found'", "]", "=", "list", "(", "not_found", ")", "return", "from_to" ]
Pass a list of identifiers (id_list), the name of the database ('Entrez', 'Symbol', 'Standard name', 'Systematic name' or a loaded crossreference database) that you wish to translate from, and the name of the database that you wish to translate to.
[ "Pass", "a", "list", "of", "identifiers", "(", "id_list", ")", "the", "name", "of", "the", "database", "(", "Entrez", "Symbol", "Standard", "name", "Systematic", "name", "or", "a", "loaded", "crossreference", "database", ")", "that", "you", "wish", "to", "translate", "from", "and", "the", "name", "of", "the", "database", "that", "you", "wish", "to", "translate", "to", "." ]
train
https://github.com/greenelab/django-genes/blob/298939adcb115031acfc11cfcef60ea0b596fae5/genes/utils.py#L5-L99
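A hedged usage sketch, assuming the Django app is configured and the relevant genes are loaded; the identifiers and organism name below are purely illustrative:

result = translate_genes(
    id_list=['7157', '7422'],   # illustrative Entrez IDs, passed as strings
    from_id='Entrez',
    to_id='Symbol',
    organism='Homo sapiens',    # illustrative scientific name
)
# Found identifiers map to a list of target identifiers; anything that could
# not be resolved is collected under the reserved 'not_found' key.
print(result['not_found'])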
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
_inner_func_anot
def _inner_func_anot(func): """must be applied to all inner functions that return contexts. Wraps all instances of pygame.Surface in the input in Surface""" @wraps(func) def new_func(*args): return func(*_lmap(_wrap_surface, args)) return new_func
python
def _inner_func_anot(func): """must be applied to all inner functions that return contexts. Wraps all instances of pygame.Surface in the input in Surface""" @wraps(func) def new_func(*args): return func(*_lmap(_wrap_surface, args)) return new_func
[ "def", "_inner_func_anot", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "new_func", "(", "*", "args", ")", ":", "return", "func", "(", "*", "_lmap", "(", "_wrap_surface", ",", "args", ")", ")", "return", "new_func" ]
must be applied to all inner functions that return contexts. Wraps all instances of pygame.Surface in the input in Surface
[ "must", "be", "applied", "to", "all", "inner", "functions", "that", "return", "contexts", ".", "Wraps", "all", "instances", "of", "pygame", ".", "Surface", "in", "the", "input", "in", "Surface" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L23-L30
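The decorator simply maps a wrapper over the positional arguments before the wrapped function runs; an equivalent standalone illustration with a stand-in for _wrap_surface:

from functools import wraps

def _wrap(arg):
    # Stand-in for _wrap_surface: tag each raw argument so the effect is visible.
    return ("wrapped", arg)

def inner_func_anot(func):
    @wraps(func)
    def new_func(*args):
        return func(*map(_wrap, args))
    return new_func

@inner_func_anot
def show(*args):
    return args

print(show(1, 2))  # (('wrapped', 1), ('wrapped', 2))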
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
Cross
def Cross(width=3, color=0): """Draws a cross centered in the target area :param width: width of the lines of the cross in pixels :type width: int :param color: color of the lines of the cross :type color: pygame.Color """ return Overlay(Line("h", width, color), Line("v", width, color))
python
def Cross(width=3, color=0): """Draws a cross centered in the target area :param width: width of the lines of the cross in pixels :type width: int :param color: color of the lines of the cross :type color: pygame.Color """ return Overlay(Line("h", width, color), Line("v", width, color))
[ "def", "Cross", "(", "width", "=", "3", ",", "color", "=", "0", ")", ":", "return", "Overlay", "(", "Line", "(", "\"h\"", ",", "width", ",", "color", ")", ",", "Line", "(", "\"v\"", ",", "width", ",", "color", ")", ")" ]
Draws a cross centered in the target area :param width: width of the lines of the cross in pixels :type width: int :param color: color of the lines of the cross :type color: pygame.Color
[ "Draws", "a", "cross", "centered", "in", "the", "target", "area" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L381-L389
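Rendered through compose (documented below), a fixation cross on an existing window could look roughly like this; the display setup is the caller's responsibility and the color values are illustrative:

import pygame
from pyparadigm.surface_composition import compose, Fill, Cross

pygame.init()
screen = pygame.display.set_mode((400, 400))

# White background with a 3px black cross centered in the frame.
compose(screen, Fill(0xFFFFFF))(Cross(width=3, color=0))
pygame.display.flip()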
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
compose
def compose(target, root=None): """Top level function to create a surface. :param target: the pygame.Surface to blit on. Or a (width, height) tuple in which case a new surface will be created :type target: - """ if type(root) == Surface: raise ValueError("A Surface may not be used as root, please add " +"it as a single child i.e. compose(...)(Surface(...))") @_inner_func_anot def inner_compose(*children): if root: root_context = root(*children) else: assert len(children) == 1 root_context = children[0] if type(target) == pygame.Surface: surface = target size = target.get_size() else: size = target surface = pygame.Surface(size) root_context._draw(surface, pygame.Rect(0, 0, *size)) return surface return inner_compose
python
def compose(target, root=None): """Top level function to create a surface. :param target: the pygame.Surface to blit on. Or a (width, height) tuple in which case a new surface will be created :type target: - """ if type(root) == Surface: raise ValueError("A Surface may not be used as root, please add " +"it as a single child i.e. compose(...)(Surface(...))") @_inner_func_anot def inner_compose(*children): if root: root_context = root(*children) else: assert len(children) == 1 root_context = children[0] if type(target) == pygame.Surface: surface = target size = target.get_size() else: size = target surface = pygame.Surface(size) root_context._draw(surface, pygame.Rect(0, 0, *size)) return surface return inner_compose
[ "def", "compose", "(", "target", ",", "root", "=", "None", ")", ":", "if", "type", "(", "root", ")", "==", "Surface", ":", "raise", "ValueError", "(", "\"A Surface may not be used as root, please add \"", "+", "\"it as a single child i.e. compose(...)(Surface(...))\"", ")", "@", "_inner_func_anot", "def", "inner_compose", "(", "*", "children", ")", ":", "if", "root", ":", "root_context", "=", "root", "(", "*", "children", ")", "else", ":", "assert", "len", "(", "children", ")", "==", "1", "root_context", "=", "children", "[", "0", "]", "if", "type", "(", "target", ")", "==", "pygame", ".", "Surface", ":", "surface", "=", "target", "size", "=", "target", ".", "get_size", "(", ")", "else", ":", "size", "=", "target", "surface", "=", "pygame", ".", "Surface", "(", "size", ")", "root_context", ".", "_draw", "(", "surface", ",", "pygame", ".", "Rect", "(", "0", ",", "0", ",", "*", "size", ")", ")", "return", "surface", "return", "inner_compose" ]
Top level function to create a surface. :param target: the pygame.Surface to blit on. Or a (width, height) tuple in which case a new surface will be created :type target: -
[ "Top", "level", "function", "to", "create", "a", "surface", ".", ":", "param", "target", ":", "the", "pygame", ".", "Surface", "to", "blit", "on", ".", "Or", "a", "(", "width", "height", ")", "tuple", "in", "which", "case", "a", "new", "surface", "will", "be", "created" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L483-L511
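When a (width, height) tuple is passed instead of an existing surface, compose creates the surface for you; a small sketch that builds a fresh 150x200 surface split into two colored halves (colors and sizes are illustrative):

import pygame
from pyparadigm.surface_composition import compose, LinLayout, Fill, Cross

pygame.init()
surf = compose((150, 200), LinLayout("v"))(
    Fill(0xFF0000)(Cross()),   # top half: red with a centered cross
    Fill(0x00FF00)(Cross()),   # bottom half: green with a centered cross
)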
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
Font
def Font(name=None, source="sys", italic=False, bold=False, size=20): """Unifies loading of fonts. :param name: name of system-font or filepath, if None is passed the default system-font is loaded :type name: str :param source: "sys" for system font, or "file" to load a file :type source: str """ assert source in ["sys", "file"] if not name: return pygame.font.SysFont(pygame.font.get_default_font(), size, bold=bold, italic=italic) if source == "sys": return pygame.font.SysFont(name, size, bold=bold, italic=italic) else: f = pygame.font.Font(name, size) f.set_italic(italic) f.set_bold(bold) return f
python
def Font(name=None, source="sys", italic=False, bold=False, size=20): """Unifies loading of fonts. :param name: name of system-font or filepath, if None is passed the default system-font is loaded :type name: str :param source: "sys" for system font, or "file" to load a file :type source: str """ assert source in ["sys", "file"] if not name: return pygame.font.SysFont(pygame.font.get_default_font(), size, bold=bold, italic=italic) if source == "sys": return pygame.font.SysFont(name, size, bold=bold, italic=italic) else: f = pygame.font.Font(name, size) f.set_italic(italic) f.set_bold(bold) return f
[ "def", "Font", "(", "name", "=", "None", ",", "source", "=", "\"sys\"", ",", "italic", "=", "False", ",", "bold", "=", "False", ",", "size", "=", "20", ")", ":", "assert", "source", "in", "[", "\"sys\"", ",", "\"file\"", "]", "if", "not", "name", ":", "return", "pygame", ".", "font", ".", "SysFont", "(", "pygame", ".", "font", ".", "get_default_font", "(", ")", ",", "size", ",", "bold", "=", "bold", ",", "italic", "=", "italic", ")", "if", "source", "==", "\"sys\"", ":", "return", "pygame", ".", "font", ".", "SysFont", "(", "name", ",", "size", ",", "bold", "=", "bold", ",", "italic", "=", "italic", ")", "else", ":", "f", "=", "pygame", ".", "font", ".", "Font", "(", "name", ",", "size", ")", "f", ".", "set_italic", "(", "italic", ")", "f", ".", "set_bold", "(", "bold", ")", "return", "f" ]
Unifies loading of fonts. :param name: name of system-font or filepath, if None is passed the default system-font is loaded :type name: str :param source: "sys" for system font, or "file" to load a file :type source: str
[ "Unifies", "loading", "of", "fonts", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L515-L536
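A few hedged examples; pygame's font module must be initialised first, and the file path in the commented line is hypothetical:

import pygame
from pyparadigm.surface_composition import Font

pygame.init()
default_font = Font(size=24)                        # default system font
courier_bold = Font("courier", bold=True, size=18)  # named system font
# ttf_font = Font("assets/MyFont.ttf", source="file", size=32)  # load from a file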
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
Text
def Text(text, font, color=pygame.Color(0, 0, 0), antialias=False, align="center"): """Renders a text. Supports multiline text, the background will be transparent. :param align: text-alignment must be "center", "left", or "right" :type align: str :return: the input text :rtype: pygame.Surface """ assert align in ["center", "left", "right"] margin_l, margin_r = 1, 1 if align == "left": margin_l = 0 elif align == "right": margin_r = 0 margin = Margin(margin_l, margin_r) color_key = pygame.Color(0, 0, 1) if pygame.Color(0, 0, 1) != color else 0x000002 text_surfaces = _lmap(lambda text: _text(text, font=font, color=color, antialias=antialias), map(methodcaller("strip"), text.split("\n"))) w = max(surf.get_rect().w for surf in text_surfaces) h = sum(surf.get_rect().h for surf in text_surfaces) surf = compose((w, h), Fill(color_key))(LinLayout("v")( *_lmap(lambda s: Surface(margin)(s), text_surfaces))) surf.set_colorkey(color_key) return surf.convert_alpha()
python
def Text(text, font, color=pygame.Color(0, 0, 0), antialias=False, align="center"): """Renders a text. Supports multiline text, the background will be transparent. :param align: text-alignment must be "center", "left", or "right" :type align: str :return: the input text :rtype: pygame.Surface """ assert align in ["center", "left", "right"] margin_l, margin_r = 1, 1 if align == "left": margin_l = 0 elif align == "right": margin_r = 0 margin = Margin(margin_l, margin_r) color_key = pygame.Color(0, 0, 1) if pygame.Color(0, 0, 1) != color else 0x000002 text_surfaces = _lmap(lambda text: _text(text, font=font, color=color, antialias=antialias), map(methodcaller("strip"), text.split("\n"))) w = max(surf.get_rect().w for surf in text_surfaces) h = sum(surf.get_rect().h for surf in text_surfaces) surf = compose((w, h), Fill(color_key))(LinLayout("v")( *_lmap(lambda s: Surface(margin)(s), text_surfaces))) surf.set_colorkey(color_key) return surf.convert_alpha()
[ "def", "Text", "(", "text", ",", "font", ",", "color", "=", "pygame", ".", "Color", "(", "0", ",", "0", ",", "0", ")", ",", "antialias", "=", "False", ",", "align", "=", "\"center\"", ")", ":", "assert", "align", "in", "[", "\"center\"", ",", "\"left\"", ",", "\"right\"", "]", "margin_l", ",", "margin_r", "=", "1", ",", "1", "if", "align", "==", "\"left\"", ":", "margin_l", "=", "0", "elif", "align", "==", "\"right\"", ":", "margin_r", "=", "0", "margin", "=", "Margin", "(", "margin_l", ",", "margin_r", ")", "color_key", "=", "pygame", ".", "Color", "(", "0", ",", "0", ",", "1", ")", "if", "pygame", ".", "Color", "(", "0", ",", "0", ",", "1", ")", "!=", "color", "else", "0x000002", "text_surfaces", "=", "_lmap", "(", "lambda", "text", ":", "_text", "(", "text", ",", "font", "=", "font", ",", "color", "=", "color", ",", "antialias", "=", "antialias", ")", ",", "map", "(", "methodcaller", "(", "\"strip\"", ")", ",", "text", ".", "split", "(", "\"\\n\"", ")", ")", ")", "w", "=", "max", "(", "surf", ".", "get_rect", "(", ")", ".", "w", "for", "surf", "in", "text_surfaces", ")", "h", "=", "sum", "(", "surf", ".", "get_rect", "(", ")", ".", "h", "for", "surf", "in", "text_surfaces", ")", "surf", "=", "compose", "(", "(", "w", ",", "h", ")", ",", "Fill", "(", "color_key", ")", ")", "(", "LinLayout", "(", "\"v\"", ")", "(", "*", "_lmap", "(", "lambda", "s", ":", "Surface", "(", "margin", ")", "(", "s", ")", ",", "text_surfaces", ")", ")", ")", "surf", ".", "set_colorkey", "(", "color_key", ")", "return", "surf", ".", "convert_alpha", "(", ")" ]
Renders a text. Supports multiline text, the background will be transparent. :param align: text-alignment must be "center", "left", or "right" :type align: str :return: the input text :rtype: pygame.Surface
[ "Renders", "a", "text", ".", "Supports", "multiline", "text", "the", "background", "will", "be", "transparent", ".", ":", "param", "align", ":", "text", "-", "alignment", "must", "be", "center", "left", "or", "right", ":", "type", "align", ":", "str", ":", "return", ":", "the", "input", "text", ":", "rtype", ":", "pygame", ".", "Surface" ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L543-L566
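A hedged sketch; note that the snippet ends in convert_alpha(), which requires an active display, so a window (even a throwaway one) is set up first:

import pygame
from pyparadigm.surface_composition import Font, Text

pygame.init()
pygame.display.set_mode((200, 200))  # convert_alpha() needs an active display

label = Text("First line\nSecond line",
             Font(size=20),
             color=pygame.Color(255, 255, 255),
             align="left")
# label is a pygame.Surface with a transparent background.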
KnorrFG/pyparadigm
pyparadigm/surface_composition.py
Padding.from_scale
def from_scale(scale_w, scale_h=None): """Creates a padding by the remaining space after scaling the content. E.g. Padding.from_scale(0.5) would produce Padding(0.25, 0.25, 0.25, 0.25) and Padding.from_scale(0.5, 1) would produce Padding(0.25, 0.25, 0, 0) because the content would not be scaled (since scale_h=1) and therefore there would be no vertical padding. If scale_h is not specified scale_h=scale_w is used as default :param scale_w: horizontal scaling factors :type scale_w: float :param scale_h: vertical scaling factor :type scale_h: float """ if not scale_h: scale_h = scale_w w_padding = [(1 - scale_w) * 0.5] * 2 h_padding = [(1 - scale_h) * 0.5] * 2 return Padding(*w_padding, *h_padding)
python
def from_scale(scale_w, scale_h=None): """Creates a padding by the remaining space after scaling the content. E.g. Padding.from_scale(0.5) would produce Padding(0.25, 0.25, 0.25, 0.25) and Padding.from_scale(0.5, 1) would produce Padding(0.25, 0.25, 0, 0) because the content would not be scaled (since scale_h=1) and therefore there would be no vertical padding. If scale_h is not specified scale_h=scale_w is used as default :param scale_w: horizontal scaling factors :type scale_w: float :param scale_h: vertical scaling factor :type scale_h: float """ if not scale_h: scale_h = scale_w w_padding = [(1 - scale_w) * 0.5] * 2 h_padding = [(1 - scale_h) * 0.5] * 2 return Padding(*w_padding, *h_padding)
[ "def", "from_scale", "(", "scale_w", ",", "scale_h", "=", "None", ")", ":", "if", "not", "scale_h", ":", "scale_h", "=", "scale_w", "w_padding", "=", "[", "(", "1", "-", "scale_w", ")", "*", "0.5", "]", "*", "2", "h_padding", "=", "[", "(", "1", "-", "scale_h", ")", "*", "0.5", "]", "*", "2", "return", "Padding", "(", "*", "w_padding", ",", "*", "h_padding", ")" ]
Creates a padding by the remaining space after scaling the content. E.g. Padding.from_scale(0.5) would produce Padding(0.25, 0.25, 0.25, 0.25) and Padding.from_scale(0.5, 1) would produce Padding(0.25, 0.25, 0, 0) because the content would not be scaled (since scale_h=1) and therefore there would be no vertical padding. If scale_h is not specified scale_h=scale_w is used as default :param scale_w: horizontal scaling factors :type scale_w: float :param scale_h: vertical scaling factor :type scale_h: float
[ "Creates", "a", "padding", "by", "the", "remaining", "space", "after", "scaling", "the", "content", "." ]
train
https://github.com/KnorrFG/pyparadigm/blob/69944cdf3ce2f6414ae1aa1d27a0d8c6e5fb3fd3/pyparadigm/surface_composition.py#L264-L282
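The arithmetic from the docstring, spelled out; Padding is assumed to be importable from the same module:

from pyparadigm.surface_composition import Padding

# The remaining space is split evenly on both sides: (1 - 0.5) / 2 = 0.25.
p1 = Padding.from_scale(0.5)     # Padding(0.25, 0.25, 0.25, 0.25)
p2 = Padding.from_scale(0.5, 1)  # Padding(0.25, 0.25, 0, 0): full height kept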
kfdm/wanikani
wanikani/core.py
WaniKani.radicals
def radicals(self, levels=None): """ :param levels string: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. http://www.wanikani.com/api/v1.2#radicals-list """ url = WANIKANI_BASE.format(self.api_key, 'radicals') if levels: url += '/{0}'.format(levels) data = self.get(url) for item in data['requested_information']: yield Radical(item)
python
def radicals(self, levels=None): """ :param levels string: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. http://www.wanikani.com/api/v1.2#radicals-list """ url = WANIKANI_BASE.format(self.api_key, 'radicals') if levels: url += '/{0}'.format(levels) data = self.get(url) for item in data['requested_information']: yield Radical(item)
[ "def", "radicals", "(", "self", ",", "levels", "=", "None", ")", ":", "url", "=", "WANIKANI_BASE", ".", "format", "(", "self", ".", "api_key", ",", "'radicals'", ")", "if", "levels", ":", "url", "+=", "'/{0}'", ".", "format", "(", "levels", ")", "data", "=", "self", ".", "get", "(", "url", ")", "for", "item", "in", "data", "[", "'requested_information'", "]", ":", "yield", "Radical", "(", "item", ")" ]
:param levels string: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. http://www.wanikani.com/api/v1.2#radicals-list
[ ":", "param", "levels", "string", ":", "An", "optional", "argument", "of", "declaring", "a", "single", "or", "comma", "-", "delimited", "list", "of", "levels", "is", "available", "as", "seen", "in", "the", "example", "as", "1", ".", "An", "example", "of", "a", "comma", "-", "delimited", "list", "of", "levels", "is", "1", "2", "5", "9", "." ]
train
https://github.com/kfdm/wanikani/blob/209f9b34b2832c2b9c9b12077f4a4382c047f710/wanikani/core.py#L168-L182
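A hedged usage sketch; the constructor signature (an API key) and the key value are assumptions:

from wanikani.core import WaniKani

client = WaniKani("YOUR-API-KEY-HERE")  # placeholder key; constructor assumed
# A single level or a comma-delimited list, exactly as the docstring describes:
for radical in client.radicals(levels="1,2,5,9"):
    print(radical)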
kfdm/wanikani
wanikani/core.py
WaniKani.kanji
def kanji(self, levels=None): """ :param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#kanji-list """ url = WANIKANI_BASE.format(self.api_key, 'kanji') if levels: url += '/{0}'.format(levels) data = self.get(url) for item in data['requested_information']: yield Kanji(item)
python
def kanji(self, levels=None): """ :param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#kanji-list """ url = WANIKANI_BASE.format(self.api_key, 'kanji') if levels: url += '/{0}'.format(levels) data = self.get(url) for item in data['requested_information']: yield Kanji(item)
[ "def", "kanji", "(", "self", ",", "levels", "=", "None", ")", ":", "url", "=", "WANIKANI_BASE", ".", "format", "(", "self", ".", "api_key", ",", "'kanji'", ")", "if", "levels", ":", "url", "+=", "'/{0}'", ".", "format", "(", "levels", ")", "data", "=", "self", ".", "get", "(", "url", ")", "for", "item", "in", "data", "[", "'requested_information'", "]", ":", "yield", "Kanji", "(", "item", ")" ]
:param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#kanji-list
[ ":", "param", "levels", ":", "An", "optional", "argument", "of", "declaring", "a", "single", "or", "comma", "-", "delimited", "list", "of", "levels", "is", "available", "as", "seen", "in", "the", "example", "as", "1", ".", "An", "example", "of", "a", "comma", "-", "delimited", "list", "of", "levels", "is", "1", "2", "5", "9", ".", ":", "type", "levels", ":", "str", "or", "None" ]
train
https://github.com/kfdm/wanikani/blob/209f9b34b2832c2b9c9b12077f4a4382c047f710/wanikani/core.py#L184-L199
kfdm/wanikani
wanikani/core.py
WaniKani.vocabulary
def vocabulary(self, levels=None): """ :param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#vocabulary-list """ url = WANIKANI_BASE.format(self.api_key, 'vocabulary') if levels: url += '/{0}'.format(levels) data = self.get(url) if 'general' in data['requested_information']: for item in data['requested_information']['general']: yield Vocabulary(item) else: for item in data['requested_information']: yield Vocabulary(item)
python
def vocabulary(self, levels=None): """ :param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#vocabulary-list """ url = WANIKANI_BASE.format(self.api_key, 'vocabulary') if levels: url += '/{0}'.format(levels) data = self.get(url) if 'general' in data['requested_information']: for item in data['requested_information']['general']: yield Vocabulary(item) else: for item in data['requested_information']: yield Vocabulary(item)
[ "def", "vocabulary", "(", "self", ",", "levels", "=", "None", ")", ":", "url", "=", "WANIKANI_BASE", ".", "format", "(", "self", ".", "api_key", ",", "'vocabulary'", ")", "if", "levels", ":", "url", "+=", "'/{0}'", ".", "format", "(", "levels", ")", "data", "=", "self", ".", "get", "(", "url", ")", "if", "'general'", "in", "data", "[", "'requested_information'", "]", ":", "for", "item", "in", "data", "[", "'requested_information'", "]", "[", "'general'", "]", ":", "yield", "Vocabulary", "(", "item", ")", "else", ":", "for", "item", "in", "data", "[", "'requested_information'", "]", ":", "yield", "Vocabulary", "(", "item", ")" ]
:param levels: An optional argument of declaring a single or comma-delimited list of levels is available, as seen in the example as 1. An example of a comma-delimited list of levels is 1,2,5,9. :type levels: str or None http://www.wanikani.com/api/v1.2#vocabulary-list
[ ":", "param", "levels", ":", "An", "optional", "argument", "of", "declaring", "a", "single", "or", "comma", "-", "delimited", "list", "of", "levels", "is", "available", "as", "seen", "in", "the", "example", "as", "1", ".", "An", "example", "of", "a", "comma", "-", "delimited", "list", "of", "levels", "is", "1", "2", "5", "9", ".", ":", "type", "levels", ":", "str", "or", "None" ]
train
https://github.com/kfdm/wanikani/blob/209f9b34b2832c2b9c9b12077f4a4382c047f710/wanikani/core.py#L202-L222
pmacosta/pmisc
pmisc/member.py
ishex
def ishex(obj): """ Test if the argument is a string representing a valid hexadecimal digit. :param obj: Object :type obj: any :rtype: boolean """ return isinstance(obj, str) and (len(obj) == 1) and (obj in string.hexdigits)
python
def ishex(obj): """ Test if the argument is a string representing a valid hexadecimal digit. :param obj: Object :type obj: any :rtype: boolean """ return isinstance(obj, str) and (len(obj) == 1) and (obj in string.hexdigits)
[ "def", "ishex", "(", "obj", ")", ":", "return", "isinstance", "(", "obj", ",", "str", ")", "and", "(", "len", "(", "obj", ")", "==", "1", ")", "and", "(", "obj", "in", "string", ".", "hexdigits", ")" ]
Test if the argument is a string representing a valid hexadecimal digit. :param obj: Object :type obj: any :rtype: boolean
[ "Test", "if", "the", "argument", "is", "a", "string", "representing", "a", "valid", "hexadecimal", "digit", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/member.py#L40-L49
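A few illustrative calls (the import path is taken from the file path above):

from pmisc.member import ishex

ishex("a")   # True: a single character found in string.hexdigits
ishex("F")   # True
ishex("g")   # False: not a hexadecimal digit
ishex("ab")  # False: only single-character strings qualify
ishex(10)    # False: not a string at all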
pmacosta/pmisc
pmisc/member.py
isnumber
def isnumber(obj): """ Test if the argument is a number (complex, float or integer). :param obj: Object :type obj: any :rtype: boolean """ return ( (obj is not None) and (not isinstance(obj, bool)) and isinstance(obj, (int, float, complex)) )
python
def isnumber(obj): """ Test if the argument is a number (complex, float or integer). :param obj: Object :type obj: any :rtype: boolean """ return ( (obj is not None) and (not isinstance(obj, bool)) and isinstance(obj, (int, float, complex)) )
[ "def", "isnumber", "(", "obj", ")", ":", "return", "(", "(", "obj", "is", "not", "None", ")", "and", "(", "not", "isinstance", "(", "obj", ",", "bool", ")", ")", "and", "isinstance", "(", "obj", ",", "(", "int", ",", "float", ",", "complex", ")", ")", ")" ]
Test if the argument is a number (complex, float or integer). :param obj: Object :type obj: any :rtype: boolean
[ "Test", "if", "the", "argument", "is", "a", "number", "(", "complex", "float", "or", "integer", ")", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/member.py#L68-L81
pmacosta/pmisc
pmisc/member.py
isreal
def isreal(obj): """ Test if the argument is a real number (float or integer). :param obj: Object :type obj: any :rtype: boolean """ return ( (obj is not None) and (not isinstance(obj, bool)) and isinstance(obj, (int, float)) )
python
def isreal(obj): """ Test if the argument is a real number (float or integer). :param obj: Object :type obj: any :rtype: boolean """ return ( (obj is not None) and (not isinstance(obj, bool)) and isinstance(obj, (int, float)) )
[ "def", "isreal", "(", "obj", ")", ":", "return", "(", "(", "obj", "is", "not", "None", ")", "and", "(", "not", "isinstance", "(", "obj", ",", "bool", ")", ")", "and", "isinstance", "(", "obj", ",", "(", "int", ",", "float", ")", ")", ")" ]
Test if the argument is a real number (float or integer). :param obj: Object :type obj: any :rtype: boolean
[ "Test", "if", "the", "argument", "is", "a", "real", "number", "(", "float", "or", "integer", ")", "." ]
train
https://github.com/pmacosta/pmisc/blob/dd2bb32e59eee872f1ef2db2d9921a396ab9f50b/pmisc/member.py#L84-L97
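The boolean exclusion is the subtle part of both predicates above; a few illustrative calls (the import path is taken from the file path):

from pmisc.member import isnumber, isreal

isreal(3)         # True
isreal(3.5)       # True
isreal(True)      # False: booleans are excluded even though bool subclasses int
isreal(3 + 0j)    # False: complex values are rejected by isreal
isnumber(3 + 0j)  # True: but they do count as numbers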
RobotStudio/bors
bors/app/builder.py
AppBuilder.create_api_context
def create_api_context(self, cls): """Create and return an API context""" return self.api_context_schema().load({ "name": cls.name, "cls": cls, "inst": [], "conf": self.conf.get_api_service(cls.name), "calls": self.conf.get_api_calls(), "shared": {}, # Used per-API to monitor state "log_level": self.conf.get_log_level(), "callback": self.receive })
python
def create_api_context(self, cls): """Create and return an API context""" return self.api_context_schema().load({ "name": cls.name, "cls": cls, "inst": [], "conf": self.conf.get_api_service(cls.name), "calls": self.conf.get_api_calls(), "shared": {}, # Used per-API to monitor state "log_level": self.conf.get_log_level(), "callback": self.receive })
[ "def", "create_api_context", "(", "self", ",", "cls", ")", ":", "return", "self", ".", "api_context_schema", "(", ")", ".", "load", "(", "{", "\"name\"", ":", "cls", ".", "name", ",", "\"cls\"", ":", "cls", ",", "\"inst\"", ":", "[", "]", ",", "\"conf\"", ":", "self", ".", "conf", ".", "get_api_service", "(", "cls", ".", "name", ")", ",", "\"calls\"", ":", "self", ".", "conf", ".", "get_api_calls", "(", ")", ",", "\"shared\"", ":", "{", "}", ",", "# Used per-API to monitor state", "\"log_level\"", ":", "self", ".", "conf", ".", "get_log_level", "(", ")", ",", "\"callback\"", ":", "self", ".", "receive", "}", ")" ]
Create and return an API context
[ "Create", "and", "return", "an", "API", "context" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/bors/app/builder.py#L38-L49
RobotStudio/bors
bors/app/builder.py
AppBuilder.receive
def receive(self, data, api_context): """Pass an API result down the pipeline""" self.log.debug(f"Putting data on the pipeline: {data}") result = { "api_contexts": self.api_contexts, "api_context": api_context, "strategy": dict(), # Shared strategy data "result": data, "log_level": api_context["log_level"], } self.strat.execute(self.strategy_context_schema().load(result).data)
python
def receive(self, data, api_context): """Pass an API result down the pipeline""" self.log.debug(f"Putting data on the pipeline: {data}") result = { "api_contexts": self.api_contexts, "api_context": api_context, "strategy": dict(), # Shared strategy data "result": data, "log_level": api_context["log_level"], } self.strat.execute(self.strategy_context_schema().load(result).data)
[ "def", "receive", "(", "self", ",", "data", ",", "api_context", ")", ":", "self", ".", "log", ".", "debug", "(", "f\"Putting data on the pipeline: {data}\"", ")", "result", "=", "{", "\"api_contexts\"", ":", "self", ".", "api_contexts", ",", "\"api_context\"", ":", "api_context", ",", "\"strategy\"", ":", "dict", "(", ")", ",", "# Shared strategy data", "\"result\"", ":", "data", ",", "\"log_level\"", ":", "api_context", "[", "\"log_level\"", "]", ",", "}", "self", ".", "strat", ".", "execute", "(", "self", ".", "strategy_context_schema", "(", ")", ".", "load", "(", "result", ")", ".", "data", ")" ]
Pass an API result down the pipeline
[ "Pass", "an", "API", "result", "down", "the", "pipeline" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/bors/app/builder.py#L56-L66
RobotStudio/bors
bors/app/builder.py
AppBuilder.shutdown
def shutdown(self, signum, frame): # pylint: disable=unused-argument """Shut it down""" if not self.exit: self.exit = True self.log.debug(f"SIGTRAP!{signum};{frame}") self.api.shutdown() self.strat.shutdown()
python
def shutdown(self, signum, frame): # pylint: disable=unused-argument """Shut it down""" if not self.exit: self.exit = True self.log.debug(f"SIGTRAP!{signum};{frame}") self.api.shutdown() self.strat.shutdown()
[ "def", "shutdown", "(", "self", ",", "signum", ",", "frame", ")", ":", "# pylint: disable=unused-argument", "if", "not", "self", ".", "exit", ":", "self", ".", "exit", "=", "True", "self", ".", "log", ".", "debug", "(", "f\"SIGTRAP!{signum};{frame}\"", ")", "self", ".", "api", ".", "shutdown", "(", ")", "self", ".", "strat", ".", "shutdown", "(", ")" ]
Shut it down
[ "Shut", "it", "down" ]
train
https://github.com/RobotStudio/bors/blob/38bf338fc6905d90819faa56bd832140116720f0/bors/app/builder.py#L68-L74
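The (signum, frame) signature matches Python's signal-handler convention, so the method can be registered directly; a hedged sketch (the AppBuilder constructor arguments are not shown in these snippets and are left as a placeholder):

import signal

app = AppBuilder(...)  # placeholder: construction details are not shown here

# Route Ctrl-C and termination requests to the graceful shutdown path.
signal.signal(signal.SIGINT, app.shutdown)
signal.signal(signal.SIGTERM, app.shutdown)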
CogSciUOS/StudDP
studdp/model.py
BaseNode.course
def course(self): """ Course this node belongs to """ course = self.parent while course.parent: course = course.parent return course
python
def course(self): """ Course this node belongs to """ course = self.parent while course.parent: course = course.parent return course
[ "def", "course", "(", "self", ")", ":", "course", "=", "self", ".", "parent", "while", "course", ".", "parent", ":", "course", "=", "course", ".", "parent", "return", "course" ]
Course this node belongs to
[ "Course", "this", "node", "belongs", "to" ]
train
https://github.com/CogSciUOS/StudDP/blob/e953aea51766438f2901c9e87f5b7b9e5bb892f5/studdp/model.py#L45-L52
CogSciUOS/StudDP
studdp/model.py
BaseNode.path
def path(self): """ Path of this node on Studip. Looks like Course/folder/folder/document. Respects the renaming policies defined in the namemap """ if self.parent is None: return self.title return join(self.parent.path, self.title)
python
def path(self): """ Path of this node on Studip. Looks like Course/folder/folder/document. Respects the renaming policies defined in the namemap """ if self.parent is None: return self.title return join(self.parent.path, self.title)
[ "def", "path", "(", "self", ")", ":", "if", "self", ".", "parent", "is", "None", ":", "return", "self", ".", "title", "return", "join", "(", "self", ".", "parent", ".", "path", ",", "self", ".", "title", ")" ]
Path of this node on Studip. Looks like Course/folder/folder/document. Respects the renaming policies defined in the namemap
[ "Path", "of", "this", "node", "on", "Studip", ".", "Looks", "like", "Course", "/", "folder", "/", "folder", "/", "document", ".", "Respects", "the", "renaming", "policies", "defined", "in", "the", "namemap" ]
train
https://github.com/CogSciUOS/StudDP/blob/e953aea51766438f2901c9e87f5b7b9e5bb892f5/studdp/model.py#L55-L61
CogSciUOS/StudDP
studdp/model.py
BaseNode.title
def title(self): """ get title of this node. If an entry for this course is found in the configuration namemap it is used, otherwise the default value from stud.ip is used. """ tmp = c.namemap_lookup(self.id) if c.namemap_lookup(self.id) is not None else self._title return secure_filename(tmp)
python
def title(self): """ get title of this node. If an entry for this course is found in the configuration namemap it is used, otherwise the default value from stud.ip is used. """ tmp = c.namemap_lookup(self.id) if c.namemap_lookup(self.id) is not None else self._title return secure_filename(tmp)
[ "def", "title", "(", "self", ")", ":", "tmp", "=", "c", ".", "namemap_lookup", "(", "self", ".", "id", ")", "if", "c", ".", "namemap_lookup", "(", "self", ".", "id", ")", "is", "not", "None", "else", "self", ".", "_title", "return", "secure_filename", "(", "tmp", ")" ]
get title of this node. If an entry for this course is found in the configuration namemap it is used, otherwise the default value from stud.ip is used.
[ "get", "title", "of", "this", "node", ".", "If", "an", "entry", "for", "this", "course", "is", "found", "in", "the", "configuration", "namemap", "it", "is", "used", "otherwise", "the", "default", "value", "from", "stud", ".", "ip", "is", "used", "." ]
train
https://github.com/CogSciUOS/StudDP/blob/e953aea51766438f2901c9e87f5b7b9e5bb892f5/studdp/model.py#L64-L70
CogSciUOS/StudDP
studdp/model.py
Folder.deep_documents
def deep_documents(self): """ list of all documents found in subtrees of this node """ tree = [] for entry in self.contents: if isinstance(entry, Document): tree.append(entry) else: tree += entry.deep_documents return tree
python
def deep_documents(self): """ list of all documents found in subtrees of this node """ tree = [] for entry in self.contents: if isinstance(entry, Document): tree.append(entry) else: tree += entry.deep_documents return tree
[ "def", "deep_documents", "(", "self", ")", ":", "tree", "=", "[", "]", "for", "entry", "in", "self", ".", "contents", ":", "if", "isinstance", "(", "entry", ",", "Document", ")", ":", "tree", ".", "append", "(", "entry", ")", "else", ":", "tree", "+=", "entry", ".", "deep_documents", "return", "tree" ]
list of all documents found in subtrees of this node
[ "list", "of", "all", "documents", "found", "in", "subtrees", "of", "this", "node" ]
train
https://github.com/CogSciUOS/StudDP/blob/e953aea51766438f2901c9e87f5b7b9e5bb892f5/studdp/model.py#L90-L100
CogSciUOS/StudDP
studdp/model.py
Course.title
def title(self): """ The title of the course. If no entry in the namemap of the configuration is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME """ name = c.namemap_lookup(self.id) if name is None: name = self._title + " " + client.get_semester_title(self) c.namemap_set(self.id, name) return secure_filename(name)
python
def title(self): """ The title of the course. If no entry in the namemap of the configuration is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME """ name = c.namemap_lookup(self.id) if name is None: name = self._title + " " + client.get_semester_title(self) c.namemap_set(self.id, name) return secure_filename(name)
[ "def", "title", "(", "self", ")", ":", "name", "=", "c", ".", "namemap_lookup", "(", "self", ".", "id", ")", "if", "name", "is", "None", ":", "name", "=", "self", ".", "_title", "+", "\" \"", "+", "client", ".", "get_semester_title", "(", "self", ")", "c", ".", "namemap_set", "(", "self", ".", "id", ",", "name", ")", "return", "secure_filename", "(", "name", ")" ]
The title of the course. If no entry in the namemap of the configuration is found a new entry is created with name=$STUD.IP_NAME + $SEMESTER_NAME
[ "The", "title", "of", "the", "course", ".", "If", "no", "entry", "in", "the", "namemap", "of", "the", "configuration", "is", "found", "a", "new", "entry", "is", "created", "with", "name", "=", "$STUD", ".", "IP_NAME", "+", "$SEMESTER_NAME" ]
train
https://github.com/CogSciUOS/StudDP/blob/e953aea51766438f2901c9e87f5b7b9e5bb892f5/studdp/model.py#L114-L122