Dataset schema (column, type, value or length range):

  id                 int32    values 0 to 252k
  repo               string   length 7 to 55
  path               string   length 4 to 127
  func_name          string   length 1 to 88
  original_string    string   length 75 to 19.8k
  language           string   1 class
  code               string   length 75 to 19.8k
  code_tokens        list
  docstring          string   length 3 to 17.3k
  docstring_tokens   list
  sha                string   length 40 to 40
  url                string   length 87 to 242
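For orientation, a minimal sketch of how rows with this schema could be loaded and filtered. It assumes a Hugging Face `datasets`-style loader; the dataset path is a placeholder, not the real identifier of this dump.

```python
# Hypothetical sketch: the dataset path below is a placeholder (assumption),
# not the actual identifier of this dump.
from datasets import load_dataset

ds = load_dataset("path/to/this-dataset", split="train")  # placeholder path

# Keep short, well-documented functions using the columns listed above.
selected = [
    row for row in ds
    if len(row["code_tokens"]) < 50 and len(row["docstring"]) >= 20
]

for row in selected[:3]:
    print(row["repo"], row["func_name"], row["url"])
```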
243,100
jpablo128/simplystatic
bin/s2.py
do_rename
def do_rename(argdict): '''Rename a page.''' site = make_site_obj(argdict) slug = argdict['slug'] newtitle = argdict['newtitle'] try: site.rename_page(slug, newtitle) print "Renamed page." except ValueError: # pragma: no cover print "Cannot rename. A page with the given slug does not exist." sys.exit()
python
def do_rename(argdict): '''Rename a page.''' site = make_site_obj(argdict) slug = argdict['slug'] newtitle = argdict['newtitle'] try: site.rename_page(slug, newtitle) print "Renamed page." except ValueError: # pragma: no cover print "Cannot rename. A page with the given slug does not exist." sys.exit()
[ "def", "do_rename", "(", "argdict", ")", ":", "site", "=", "make_site_obj", "(", "argdict", ")", "slug", "=", "argdict", "[", "'slug'", "]", "newtitle", "=", "argdict", "[", "'newtitle'", "]", "try", ":", "site", ".", "rename_page", "(", "slug", ",", "newtitle", ")", "print", "\"Renamed page.\"", "except", "ValueError", ":", "# pragma: no cover", "print", "\"Cannot rename. A page with the given slug does not exist.\"", "sys", ".", "exit", "(", ")" ]
Rename a page.
[ "Rename", "a", "page", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/bin/s2.py#L156-L166
243,101
jpablo128/simplystatic
bin/s2.py
do_gen
def do_gen(argdict): '''Generate the whole site.''' site = make_site_obj(argdict) try: st = time.time() site.generate() et = time.time() print "Generated Site in %f seconds."% (et-st) except ValueError as e: # pragma: no cover print "Cannot generate. You are not within a simplystatic \ tree and you didn't specify a directory."
python
def do_gen(argdict): '''Generate the whole site.''' site = make_site_obj(argdict) try: st = time.time() site.generate() et = time.time() print "Generated Site in %f seconds."% (et-st) except ValueError as e: # pragma: no cover print "Cannot generate. You are not within a simplystatic \ tree and you didn't specify a directory."
[ "def", "do_gen", "(", "argdict", ")", ":", "site", "=", "make_site_obj", "(", "argdict", ")", "try", ":", "st", "=", "time", ".", "time", "(", ")", "site", ".", "generate", "(", ")", "et", "=", "time", ".", "time", "(", ")", "print", "\"Generated Site in %f seconds.\"", "%", "(", "et", "-", "st", ")", "except", "ValueError", "as", "e", ":", "# pragma: no cover", "print", "\"Cannot generate. You are not within a simplystatic \\\ntree and you didn't specify a directory.\"" ]
Generate the whole site.
[ "Generate", "the", "whole", "site", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/bin/s2.py#L168-L178
243,102
jpablo128/simplystatic
bin/s2.py
do_ls
def do_ls(argdict): '''List pages.''' site = make_site_obj(argdict) if not site.tree_ready: print "Cannot list pages. You are not within a simplystatic \ tree and you didn't specify a directory." sys.exit() drafts = argdict['drafts'] recent = argdict['recent'] dir = site.dirs['source'] r = site.get_page_names() if drafts: fr = [os.path.join(dir,pn , pn+".md") for pn in r] cpat = re.compile('status:\s+draft') #compiled pattern i=0 print '' for f in fr: fcontent = open(f,'r').read().lower() res = cpat.search(fcontent) if res: print r[i] i += 1 print '' else: if recent: fr = [os.path.join(dir,pn , pn+".md") for pn in r] bmt = 0 i = 0 for f in fr: mt = os.path.getmtime(f) if mt > bmt: bmt = mt fname = r[i] i += 1 print '\n' + fname + '\n' else: print '\n' + '\n'.join(r) + '\n'
python
def do_ls(argdict): '''List pages.''' site = make_site_obj(argdict) if not site.tree_ready: print "Cannot list pages. You are not within a simplystatic \ tree and you didn't specify a directory." sys.exit() drafts = argdict['drafts'] recent = argdict['recent'] dir = site.dirs['source'] r = site.get_page_names() if drafts: fr = [os.path.join(dir,pn , pn+".md") for pn in r] cpat = re.compile('status:\s+draft') #compiled pattern i=0 print '' for f in fr: fcontent = open(f,'r').read().lower() res = cpat.search(fcontent) if res: print r[i] i += 1 print '' else: if recent: fr = [os.path.join(dir,pn , pn+".md") for pn in r] bmt = 0 i = 0 for f in fr: mt = os.path.getmtime(f) if mt > bmt: bmt = mt fname = r[i] i += 1 print '\n' + fname + '\n' else: print '\n' + '\n'.join(r) + '\n'
[ "def", "do_ls", "(", "argdict", ")", ":", "site", "=", "make_site_obj", "(", "argdict", ")", "if", "not", "site", ".", "tree_ready", ":", "print", "\"Cannot list pages. You are not within a simplystatic \\\ntree and you didn't specify a directory.\"", "sys", ".", "exit", "(", ")", "drafts", "=", "argdict", "[", "'drafts'", "]", "recent", "=", "argdict", "[", "'recent'", "]", "dir", "=", "site", ".", "dirs", "[", "'source'", "]", "r", "=", "site", ".", "get_page_names", "(", ")", "if", "drafts", ":", "fr", "=", "[", "os", ".", "path", ".", "join", "(", "dir", ",", "pn", ",", "pn", "+", "\".md\"", ")", "for", "pn", "in", "r", "]", "cpat", "=", "re", ".", "compile", "(", "'status:\\s+draft'", ")", "#compiled pattern", "i", "=", "0", "print", "''", "for", "f", "in", "fr", ":", "fcontent", "=", "open", "(", "f", ",", "'r'", ")", ".", "read", "(", ")", ".", "lower", "(", ")", "res", "=", "cpat", ".", "search", "(", "fcontent", ")", "if", "res", ":", "print", "r", "[", "i", "]", "i", "+=", "1", "print", "''", "else", ":", "if", "recent", ":", "fr", "=", "[", "os", ".", "path", ".", "join", "(", "dir", ",", "pn", ",", "pn", "+", "\".md\"", ")", "for", "pn", "in", "r", "]", "bmt", "=", "0", "i", "=", "0", "for", "f", "in", "fr", ":", "mt", "=", "os", ".", "path", ".", "getmtime", "(", "f", ")", "if", "mt", ">", "bmt", ":", "bmt", "=", "mt", "fname", "=", "r", "[", "i", "]", "i", "+=", "1", "print", "'\\n'", "+", "fname", "+", "'\\n'", "else", ":", "print", "'\\n'", "+", "'\\n'", ".", "join", "(", "r", ")", "+", "'\\n'" ]
List pages.
[ "List", "pages", "." ]
91ac579c8f34fa240bef9b87adb0116c6b40b24d
https://github.com/jpablo128/simplystatic/blob/91ac579c8f34fa240bef9b87adb0116c6b40b24d/bin/s2.py#L180-L217
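The three `do_*` handlers above are CLI entry points in `bin/s2.py`. A self-contained sketch of the draft-detection check that `do_ls` applies to each page's markdown file (the surrounding simplystatic objects are not needed for this part):

```python
import re

# Same pattern as in do_ls(): a page is a draft when its (lowercased) markdown
# source contains "status:" followed by whitespace and "draft".
DRAFT_PATTERN = re.compile(r'status:\s+draft')

def is_draft(markdown_text):
    return bool(DRAFT_PATTERN.search(markdown_text.lower()))

print(is_draft("Title: Hello\nStatus:   draft\n\nBody..."))   # True
print(is_draft("Title: Hello\nStatus: published\n"))          # False
```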
243,103
b3j0f/schema
b3j0f/schema/registry.py
SchemaRegistry.register
def register(self, schema): """Register input schema class. When registering a schema, all inner schemas are registered as well. :param Schema schema: schema to register. :return: old registered schema. :rtype: type """ result = None uuid = schema.uuid if uuid in self._schbyuuid: result = self._schbyuuid[uuid] if result != schema: self._schbyuuid[uuid] = schema name = schema.name schemas = self._schbyname.setdefault(name, set()) schemas.add(schema) for innername, innerschema in iteritems(schema.getschemas()): if innerschema.uuid not in self._schbyuuid: register(innerschema) return result
python
def register(self, schema): """Register input schema class. When registering a schema, all inner schemas are registered as well. :param Schema schema: schema to register. :return: old registered schema. :rtype: type """ result = None uuid = schema.uuid if uuid in self._schbyuuid: result = self._schbyuuid[uuid] if result != schema: self._schbyuuid[uuid] = schema name = schema.name schemas = self._schbyname.setdefault(name, set()) schemas.add(schema) for innername, innerschema in iteritems(schema.getschemas()): if innerschema.uuid not in self._schbyuuid: register(innerschema) return result
[ "def", "register", "(", "self", ",", "schema", ")", ":", "result", "=", "None", "uuid", "=", "schema", ".", "uuid", "if", "uuid", "in", "self", ".", "_schbyuuid", ":", "result", "=", "self", ".", "_schbyuuid", "[", "uuid", "]", "if", "result", "!=", "schema", ":", "self", ".", "_schbyuuid", "[", "uuid", "]", "=", "schema", "name", "=", "schema", ".", "name", "schemas", "=", "self", ".", "_schbyname", ".", "setdefault", "(", "name", ",", "set", "(", ")", ")", "schemas", ".", "add", "(", "schema", ")", "for", "innername", ",", "innerschema", "in", "iteritems", "(", "schema", ".", "getschemas", "(", ")", ")", ":", "if", "innerschema", ".", "uuid", "not", "in", "self", ".", "_schbyuuid", ":", "register", "(", "innerschema", ")", "return", "result" ]
Register input schema class. When registering a schema, all inner schemas are registered as well. :param Schema schema: schema to register. :return: old registered schema. :rtype: type
[ "Register", "input", "schema", "class", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/registry.py#L69-L100
243,104
b3j0f/schema
b3j0f/schema/registry.py
SchemaRegistry.registercls
def registercls(self, data_types, schemacls=None): """Register schema class with associated data_types. Can be used such as a decorator. :param list data_types: data types to associate with schema class. :param type schemacls: schema class to register. :return: schemacls. :rtype: type """ if schemacls is None: return lambda schemacls: self.registercls( data_types=data_types, schemacls=schemacls ) for data_type in data_types: self._schbytype[data_type] = schemacls return schemacls
python
def registercls(self, data_types, schemacls=None): """Register schema class with associated data_types. Can be used such as a decorator. :param list data_types: data types to associate with schema class. :param type schemacls: schema class to register. :return: schemacls. :rtype: type """ if schemacls is None: return lambda schemacls: self.registercls( data_types=data_types, schemacls=schemacls ) for data_type in data_types: self._schbytype[data_type] = schemacls return schemacls
[ "def", "registercls", "(", "self", ",", "data_types", ",", "schemacls", "=", "None", ")", ":", "if", "schemacls", "is", "None", ":", "return", "lambda", "schemacls", ":", "self", ".", "registercls", "(", "data_types", "=", "data_types", ",", "schemacls", "=", "schemacls", ")", "for", "data_type", "in", "data_types", ":", "self", ".", "_schbytype", "[", "data_type", "]", "=", "schemacls", "return", "schemacls" ]
Register schema class with associated data_types. Can be used such as a decorator. :param list data_types: data types to associate with schema class. :param type schemacls: schema class to register. :return: schemacls. :rtype: type
[ "Register", "schema", "class", "with", "associated", "data_types", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/registry.py#L102-L120
243,105
b3j0f/schema
b3j0f/schema/registry.py
SchemaRegistry.unregister
def unregister(self, uuid): """Unregister a schema registered with input uuid. :raises: KeyError if uuid is not already registered. """ schema = self._schbyuuid.pop(uuid) # clean schemas by name self._schbyname[schema.name].remove(schema) if not self._schbyname[schema.name]: del self._schbyname[schema.name]
python
def unregister(self, uuid): """Unregister a schema registered with input uuid. :raises: KeyError if uuid is not already registered. """ schema = self._schbyuuid.pop(uuid) # clean schemas by name self._schbyname[schema.name].remove(schema) if not self._schbyname[schema.name]: del self._schbyname[schema.name]
[ "def", "unregister", "(", "self", ",", "uuid", ")", ":", "schema", "=", "self", ".", "_schbyuuid", ".", "pop", "(", "uuid", ")", "# clean schemas by name", "self", ".", "_schbyname", "[", "schema", ".", "name", "]", ".", "remove", "(", "schema", ")", "if", "not", "self", ".", "_schbyname", "[", "schema", ".", "name", "]", ":", "del", "self", ".", "_schbyname", "[", "schema", ".", "name", "]" ]
Unregister a schema registered with input uuid. :raises: KeyError if uuid is not already registered.
[ "Unregister", "a", "schema", "registered", "with", "input", "uuid", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/registry.py#L122-L132
243,106
b3j0f/schema
b3j0f/schema/registry.py
SchemaRegistry.getbyuuid
def getbyuuid(self, uuid): """Get a schema by given uuid. :param str uuid: schema uuid to retrieve. :rtype: Schema :raises: KeyError if uuid is not registered already. """ if uuid not in self._schbyuuid: raise KeyError('uuid {0} not registered'.format(uuid)) return self._schbyuuid[uuid]
python
def getbyuuid(self, uuid): """Get a schema by given uuid. :param str uuid: schema uuid to retrieve. :rtype: Schema :raises: KeyError if uuid is not registered already. """ if uuid not in self._schbyuuid: raise KeyError('uuid {0} not registered'.format(uuid)) return self._schbyuuid[uuid]
[ "def", "getbyuuid", "(", "self", ",", "uuid", ")", ":", "if", "uuid", "not", "in", "self", ".", "_schbyuuid", ":", "raise", "KeyError", "(", "'uuid {0} not registered'", ".", "format", "(", "uuid", ")", ")", "return", "self", ".", "_schbyuuid", "[", "uuid", "]" ]
Get a schema by given uuid. :param str uuid: schema uuid to retrieve. :rtype: Schema :raises: KeyError if uuid is not registered already.
[ "Get", "a", "schema", "by", "given", "uuid", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/registry.py#L154-L164
243,107
b3j0f/schema
b3j0f/schema/registry.py
SchemaRegistry.getbyname
def getbyname(self, name): """Get schemas by given name. :param str name: schema names to retrieve. :rtype: list :raises: KeyError if name is not registered already. """ if name not in self._schbyname: raise KeyError('name {0} not registered'.format(name)) return self._schbyname[name]
python
def getbyname(self, name): """Get schemas by given name. :param str name: schema names to retrieve. :rtype: list :raises: KeyError if name is not registered already. """ if name not in self._schbyname: raise KeyError('name {0} not registered'.format(name)) return self._schbyname[name]
[ "def", "getbyname", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "_schbyname", ":", "raise", "KeyError", "(", "'name {0} not registered'", ".", "format", "(", "name", ")", ")", "return", "self", ".", "_schbyname", "[", "name", "]" ]
Get schemas by given name. :param str name: schema names to retrieve. :rtype: list :raises: KeyError if name is not registered already.
[ "Get", "schemas", "by", "given", "name", "." ]
1c88c23337f5fef50254e65bd407112c43396dd9
https://github.com/b3j0f/schema/blob/1c88c23337f5fef50254e65bd407112c43396dd9/b3j0f/schema/registry.py#L166-L176
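An illustrative round trip through the SchemaRegistry methods above. The `DummySchema` stand-in and the no-argument constructor are assumptions; the registry code shown only relies on `.uuid`, `.name` and `.getschemas()`.

```python
from b3j0f.schema.registry import SchemaRegistry  # assumes b3j0f.schema is installed

class DummySchema(object):
    uuid = 'dummy-uuid'
    name = 'dummy'

    def getschemas(self):
        return {}          # no inner schemas to register

registry = SchemaRegistry()                # constructor args not shown in this dump
registry.register(DummySchema())           # indexed by uuid and by name
print(registry.getbyuuid('dummy-uuid'))    # -> the registered schema
print(registry.getbyname('dummy'))         # -> set of schemas sharing that name
registry.unregister('dummy-uuid')          # getbyuuid() now raises KeyError
```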
243,108
firstprayer/monsql
monsql/wrapper_sqlite3.py
SQLite3Database.truncate_table
def truncate_table(self, tablename): """ SQLite3 doesn't support direct truncate, so we just use delete here """ self.get(tablename).remove() self.db.commit()
python
def truncate_table(self, tablename): """ SQLite3 doesn't support direct truncate, so we just use delete here """ self.get(tablename).remove() self.db.commit()
[ "def", "truncate_table", "(", "self", ",", "tablename", ")", ":", "self", ".", "get", "(", "tablename", ")", ".", "remove", "(", ")", "self", ".", "db", ".", "commit", "(", ")" ]
SQLite3 doesn't support direct truncate, so we just use delete here
[ "SQLite3", "doesn", "t", "support", "direct", "truncate", "so", "we", "just", "use", "delete", "here" ]
6285c15b574c8664046eae2edfeb548c7b173efd
https://github.com/firstprayer/monsql/blob/6285c15b574c8664046eae2edfeb548c7b173efd/monsql/wrapper_sqlite3.py#L41-L46
243,109
vinu76jsr/pipsort
lib/pipsort/core/_logger.py
_Logger.start
def start(self, level="WARN"): """ Start logging with this logger. Until the logger is started, no messages will be emitted. This applies to all loggers with the same name and any child loggers. Messages less than the given priority level will be ignored. The default level is 'WARN', which conforms to the *nix convention that a successful run should produce no diagnostic output. Available levels and their suggested meanings: DEBUG - output useful for developers INFO - trace normal program flow, especially external interactions WARN - an abnormal condition was detected that might need attention ERROR - an error was detected but execution continued CRITICAL - an error was detected and execution was halted """ if self.active: return handler = StreamHandler() # stderr handler.setFormatter(Formatter(self.LOGFMT)) self.addHandler(handler) self.setLevel(level.upper()) self.active = True return
python
def start(self, level="WARN"): """ Start logging with this logger. Until the logger is started, no messages will be emitted. This applies to all loggers with the same name and any child loggers. Messages less than the given priority level will be ignored. The default level is 'WARN', which conforms to the *nix convention that a successful run should produce no diagnostic output. Available levels and their suggested meanings: DEBUG - output useful for developers INFO - trace normal program flow, especially external interactions WARN - an abnormal condition was detected that might need attention ERROR - an error was detected but execution continued CRITICAL - an error was detected and execution was halted """ if self.active: return handler = StreamHandler() # stderr handler.setFormatter(Formatter(self.LOGFMT)) self.addHandler(handler) self.setLevel(level.upper()) self.active = True return
[ "def", "start", "(", "self", ",", "level", "=", "\"WARN\"", ")", ":", "if", "self", ".", "active", ":", "return", "handler", "=", "StreamHandler", "(", ")", "# stderr", "handler", ".", "setFormatter", "(", "Formatter", "(", "self", ".", "LOGFMT", ")", ")", "self", ".", "addHandler", "(", "handler", ")", "self", ".", "setLevel", "(", "level", ".", "upper", "(", ")", ")", "self", ".", "active", "=", "True", "return" ]
Start logging with this logger. Until the logger is started, no messages will be emitted. This applies to all loggers with the same name and any child loggers. Messages less than the given priority level will be ignored. The default level is 'WARN', which conforms to the *nix convention that a successful run should produce no diagnostic output. Available levels and their suggested meanings: DEBUG - output useful for developers INFO - trace normal program flow, especially external interactions WARN - an abnormal condition was detected that might need attention ERROR - an error was detected but execution continued CRITICAL - an error was detected and execution was halted
[ "Start", "logging", "with", "this", "logger", "." ]
71ead1269de85ee0255741390bf1da85d81b7d16
https://github.com/vinu76jsr/pipsort/blob/71ead1269de85ee0255741390bf1da85d81b7d16/lib/pipsort/core/_logger.py#L38-L63
243,110
vinu76jsr/pipsort
lib/pipsort/core/_logger.py
_Logger.stop
def stop(self): """ Stop logging with this logger. """ if not self.active: return self.removeHandler(self.handlers[-1]) self.active = False return
python
def stop(self): """ Stop logging with this logger. """ if not self.active: return self.removeHandler(self.handlers[-1]) self.active = False return
[ "def", "stop", "(", "self", ")", ":", "if", "not", "self", ".", "active", ":", "return", "self", ".", "removeHandler", "(", "self", ".", "handlers", "[", "-", "1", "]", ")", "self", ".", "active", "=", "False", "return" ]
Stop logging with this logger.
[ "Stop", "logging", "with", "this", "logger", "." ]
71ead1269de85ee0255741390bf1da85d81b7d16
https://github.com/vinu76jsr/pipsort/blob/71ead1269de85ee0255741390bf1da85d81b7d16/lib/pipsort/core/_logger.py#L65-L73
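The start()/stop() pair above wraps the standard logging module; a minimal standalone sketch of the same pattern, with LOGFMT as a stand-in for the class attribute that this dump does not show:

```python
import logging
from logging import Formatter, StreamHandler

LOGFMT = "%(name)s [%(levelname)s] %(message)s"   # stand-in for _Logger.LOGFMT

logger = logging.getLogger("pipsort.demo")
handler = StreamHandler()                  # stderr, as in _Logger.start()
handler.setFormatter(Formatter(LOGFMT))
logger.addHandler(handler)
logger.setLevel("WARN")                    # default level used by start()

logger.warning("this is emitted")          # at or above the configured level
logger.info("this is suppressed")

logger.removeHandler(handler)              # mirrors _Logger.stop()
```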
243,111
jalanb/pysyte
pysyte/tracebacks.py
parse_line
def parse_line(string): """Parse a single string as traceback line""" match = line_regexp().match(string) if match: matches = match.groupdict() line_number = matches['line_number'] path_to_python = matches['path_to_python'] spaceless_path_to_python = matches['spaceless_path_to_python'] if path_to_python: return path_to_python, line_number elif spaceless_path_to_python: return spaceless_path_to_python, line_number
python
def parse_line(string): """Parse a single string as traceback line""" match = line_regexp().match(string) if match: matches = match.groupdict() line_number = matches['line_number'] path_to_python = matches['path_to_python'] spaceless_path_to_python = matches['spaceless_path_to_python'] if path_to_python: return path_to_python, line_number elif spaceless_path_to_python: return spaceless_path_to_python, line_number
[ "def", "parse_line", "(", "string", ")", ":", "match", "=", "line_regexp", "(", ")", ".", "match", "(", "string", ")", "if", "match", ":", "matches", "=", "match", ".", "groupdict", "(", ")", "line_number", "=", "matches", "[", "'line_number'", "]", "path_to_python", "=", "matches", "[", "'path_to_python'", "]", "spaceless_path_to_python", "=", "matches", "[", "'spaceless_path_to_python'", "]", "if", "path_to_python", ":", "return", "path_to_python", ",", "line_number", "elif", "spaceless_path_to_python", ":", "return", "spaceless_path_to_python", ",", "line_number" ]
Parse a single string as traceback line
[ "Parse", "a", "single", "string", "as", "traceback", "line" ]
4e278101943d1ceb1a6bcaf6ddc72052ecf13114
https://github.com/jalanb/pysyte/blob/4e278101943d1ceb1a6bcaf6ddc72052ecf13114/pysyte/tracebacks.py#L25-L36
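Illustrative call to parse_line(); the exact formats accepted depend on line_regexp(), which is outside this dump, so the traceback line below is an assumption based on the captured group names.

```python
from pysyte.tracebacks import parse_line   # assumes pysyte is installed

result = parse_line('  File "/home/user/project/module.py", line 42, in main')
if result:
    path, line_number = result
    print(path, line_number)    # e.g. /home/user/project/module.py 42
```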
243,112
timeyyy/apptools
peasoup/util.py
sftp_upload_window_size_set
def sftp_upload_window_size_set(srv,file, method_to_call='put'): ''' sets config for uploading files with pysftp ''' channel = srv.sftp_client.get_channel() channel.lock.acquire() channel.out_window_size += os.stat(file).st_size * 1.1 # bit more bytes incase packet loss channel.out_buffer_cv.notifyAll() channel.lock.release()
python
def sftp_upload_window_size_set(srv,file, method_to_call='put'): ''' sets config for uploading files with pysftp ''' channel = srv.sftp_client.get_channel() channel.lock.acquire() channel.out_window_size += os.stat(file).st_size * 1.1 # bit more bytes incase packet loss channel.out_buffer_cv.notifyAll() channel.lock.release()
[ "def", "sftp_upload_window_size_set", "(", "srv", ",", "file", ",", "method_to_call", "=", "'put'", ")", ":", "channel", "=", "srv", ".", "sftp_client", ".", "get_channel", "(", ")", "channel", ".", "lock", ".", "acquire", "(", ")", "channel", ".", "out_window_size", "+=", "os", ".", "stat", "(", "file", ")", ".", "st_size", "*", "1.1", "# bit more bytes incase packet loss", "channel", ".", "out_buffer_cv", ".", "notifyAll", "(", ")", "channel", ".", "lock", ".", "release", "(", ")" ]
sets config for uploading files with pysftp
[ "sets", "config", "for", "uploading", "files", "with", "pysftp" ]
d3c0f324b0c2689c35f5601348276f4efd6cb240
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/util.py#L76-L84
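Illustrative usage of the window-size helper above before a large upload; the host and credentials are placeholders, and it assumes `srv` is a pysftp Connection exposing `sftp_client`, as the helper itself expects.

```python
import pysftp
from peasoup.util import sftp_upload_window_size_set

# Placeholder host/credentials (assumption).
with pysftp.Connection('sftp.example.com', username='user', password='secret') as srv:
    # Widen the outbound window to roughly the file size before the transfer.
    sftp_upload_window_size_set(srv, 'big-archive.tar.gz')
    srv.put('big-archive.tar.gz')
```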
243,113
se-esss-litterbox/Pynac
Pynac/Plotting.py
PynPlt.plotit
def plotit(self): ''' Produce the plots requested in the Dynac input file. This makes the same plots as produced by the Dynac ``plotit`` command. ''' [self._plot(i) for i in range(len(self.plots))]
python
def plotit(self): ''' Produce the plots requested in the Dynac input file. This makes the same plots as produced by the Dynac ``plotit`` command. ''' [self._plot(i) for i in range(len(self.plots))]
[ "def", "plotit", "(", "self", ")", ":", "[", "self", ".", "_plot", "(", "i", ")", "for", "i", "in", "range", "(", "len", "(", "self", ".", "plots", ")", ")", "]" ]
Produce the plots requested in the Dynac input file. This makes the same plots as produced by the Dynac ``plotit`` command.
[ "Produce", "the", "plots", "requested", "in", "the", "Dynac", "input", "file", ".", "This", "makes", "the", "same", "plots", "as", "produced", "by", "the", "Dynac", "plotit", "command", "." ]
97e20aa85d20112cd114faa54a8197c5d0f61209
https://github.com/se-esss-litterbox/Pynac/blob/97e20aa85d20112cd114faa54a8197c5d0f61209/Pynac/Plotting.py#L109-L114
243,114
maxweisspoker/simplebitcoinfuncs
simplebitcoinfuncs/bitcoin.py
multiplypub
def multiplypub(pub,priv,outcompressed=True): ''' Input pubkey must be hex string and valid pubkey. Input privkey must be 64-char hex string. Pubkey input can be compressed or uncompressed, as long as it's a valid key and a hex string. Use the validatepubkey() function to validate the public key first. The compression of the input public key does not do anything or matter in any way. ''' if len(pub) == 66: pub = uncompress(pub) x, y = ecmultiply(int(pub[2:66],16),int(pub[66:],16),int(priv,16)) x = dechex(x,32) y = dechex(y,32) o = '04' + x + y if outcompressed: return compress(o) else: return o
python
def multiplypub(pub,priv,outcompressed=True): ''' Input pubkey must be hex string and valid pubkey. Input privkey must be 64-char hex string. Pubkey input can be compressed or uncompressed, as long as it's a valid key and a hex string. Use the validatepubkey() function to validate the public key first. The compression of the input public key does not do anything or matter in any way. ''' if len(pub) == 66: pub = uncompress(pub) x, y = ecmultiply(int(pub[2:66],16),int(pub[66:],16),int(priv,16)) x = dechex(x,32) y = dechex(y,32) o = '04' + x + y if outcompressed: return compress(o) else: return o
[ "def", "multiplypub", "(", "pub", ",", "priv", ",", "outcompressed", "=", "True", ")", ":", "if", "len", "(", "pub", ")", "==", "66", ":", "pub", "=", "uncompress", "(", "pub", ")", "x", ",", "y", "=", "ecmultiply", "(", "int", "(", "pub", "[", "2", ":", "66", "]", ",", "16", ")", ",", "int", "(", "pub", "[", "66", ":", "]", ",", "16", ")", ",", "int", "(", "priv", ",", "16", ")", ")", "x", "=", "dechex", "(", "x", ",", "32", ")", "y", "=", "dechex", "(", "y", ",", "32", ")", "o", "=", "'04'", "+", "x", "+", "y", "if", "outcompressed", ":", "return", "compress", "(", "o", ")", "else", ":", "return", "o" ]
Input pubkey must be hex string and valid pubkey. Input privkey must be 64-char hex string. Pubkey input can be compressed or uncompressed, as long as it's a valid key and a hex string. Use the validatepubkey() function to validate the public key first. The compression of the input public key does not do anything or matter in any way.
[ "Input", "pubkey", "must", "be", "hex", "string", "and", "valid", "pubkey", ".", "Input", "privkey", "must", "be", "64", "-", "char", "hex", "string", "." ]
ad332433dfcc067e86d2e77fa0c8f1a27daffb63
https://github.com/maxweisspoker/simplebitcoinfuncs/blob/ad332433dfcc067e86d2e77fa0c8f1a27daffb63/simplebitcoinfuncs/bitcoin.py#L105-L125
243,115
maxweisspoker/simplebitcoinfuncs
simplebitcoinfuncs/bitcoin.py
validatepubkey
def validatepubkey(pub): ''' Returns input key if it's a valid hex public key, or False otherwise. Input must be hex string, not bytes or integer/long or anything else. ''' try: pub = hexstrlify(unhexlify(pub)) except: return False if len(pub) == 130: if pub[:2] != '04': return False if uncompress(compress(pub)) != pub: return False elif len(pub) == 66: if pub[:2] != '02' and pub[:2] != '03': return False else: return False return pub
python
def validatepubkey(pub): ''' Returns input key if it's a valid hex public key, or False otherwise. Input must be hex string, not bytes or integer/long or anything else. ''' try: pub = hexstrlify(unhexlify(pub)) except: return False if len(pub) == 130: if pub[:2] != '04': return False if uncompress(compress(pub)) != pub: return False elif len(pub) == 66: if pub[:2] != '02' and pub[:2] != '03': return False else: return False return pub
[ "def", "validatepubkey", "(", "pub", ")", ":", "try", ":", "pub", "=", "hexstrlify", "(", "unhexlify", "(", "pub", ")", ")", "except", ":", "return", "False", "if", "len", "(", "pub", ")", "==", "130", ":", "if", "pub", "[", ":", "2", "]", "!=", "'04'", ":", "return", "False", "if", "uncompress", "(", "compress", "(", "pub", ")", ")", "!=", "pub", ":", "return", "False", "elif", "len", "(", "pub", ")", "==", "66", ":", "if", "pub", "[", ":", "2", "]", "!=", "'02'", "and", "pub", "[", ":", "2", "]", "!=", "'03'", ":", "return", "False", "else", ":", "return", "False", "return", "pub" ]
Returns input key if it's a valid hex public key, or False otherwise. Input must be hex string, not bytes or integer/long or anything else.
[ "Returns", "input", "key", "if", "it", "s", "a", "valid", "hex", "public", "key", "or", "False", "otherwise", "." ]
ad332433dfcc067e86d2e77fa0c8f1a27daffb63
https://github.com/maxweisspoker/simplebitcoinfuncs/blob/ad332433dfcc067e86d2e77fa0c8f1a27daffb63/simplebitcoinfuncs/bitcoin.py#L197-L220
243,116
maxweisspoker/simplebitcoinfuncs
simplebitcoinfuncs/bitcoin.py
privtohex
def privtohex(key): ''' Used for getting unknown input type into a private key. For example, if you ask a user to input a private key, and they may input hex, WIF, integer, etc. Run it through this function to get a standardized format. Function either outputs private key hex string or raises an exception. It's really going to try to make any input into a private key, so make sure that whatever you import is indeed supposed to be a private key. For example, if you put an int in, it will turn that into a key. Make sure you want a key when you use this function!!! ''' if isitint(key): key = dechex(key,32) else: try: key, z, zz = wiftohex(key) assert len(key) == 64 except: try: key = unhexlify(key) except: try: key1 = hexstrlify(key) assert len(key1) == 64 or len(key1) == 66 or len(key1) == 68 if len(key1) == 68: assert key1[-2:] == '01' key = key1 except: raise Exception("Cannot interpret input key.") else: key = hexstrlify(key) if len(key) == 68: assert key[-2:] == '01' key = key[:-2] if len(key) == 66: key = key[2:] assert len(key) == 64 return key
python
def privtohex(key): ''' Used for getting unknown input type into a private key. For example, if you ask a user to input a private key, and they may input hex, WIF, integer, etc. Run it through this function to get a standardized format. Function either outputs private key hex string or raises an exception. It's really going to try to make any input into a private key, so make sure that whatever you import is indeed supposed to be a private key. For example, if you put an int in, it will turn that into a key. Make sure you want a key when you use this function!!! ''' if isitint(key): key = dechex(key,32) else: try: key, z, zz = wiftohex(key) assert len(key) == 64 except: try: key = unhexlify(key) except: try: key1 = hexstrlify(key) assert len(key1) == 64 or len(key1) == 66 or len(key1) == 68 if len(key1) == 68: assert key1[-2:] == '01' key = key1 except: raise Exception("Cannot interpret input key.") else: key = hexstrlify(key) if len(key) == 68: assert key[-2:] == '01' key = key[:-2] if len(key) == 66: key = key[2:] assert len(key) == 64 return key
[ "def", "privtohex", "(", "key", ")", ":", "if", "isitint", "(", "key", ")", ":", "key", "=", "dechex", "(", "key", ",", "32", ")", "else", ":", "try", ":", "key", ",", "z", ",", "zz", "=", "wiftohex", "(", "key", ")", "assert", "len", "(", "key", ")", "==", "64", "except", ":", "try", ":", "key", "=", "unhexlify", "(", "key", ")", "except", ":", "try", ":", "key1", "=", "hexstrlify", "(", "key", ")", "assert", "len", "(", "key1", ")", "==", "64", "or", "len", "(", "key1", ")", "==", "66", "or", "len", "(", "key1", ")", "==", "68", "if", "len", "(", "key1", ")", "==", "68", ":", "assert", "key1", "[", "-", "2", ":", "]", "==", "'01'", "key", "=", "key1", "except", ":", "raise", "Exception", "(", "\"Cannot interpret input key.\"", ")", "else", ":", "key", "=", "hexstrlify", "(", "key", ")", "if", "len", "(", "key", ")", "==", "68", ":", "assert", "key", "[", "-", "2", ":", "]", "==", "'01'", "key", "=", "key", "[", ":", "-", "2", "]", "if", "len", "(", "key", ")", "==", "66", ":", "key", "=", "key", "[", "2", ":", "]", "assert", "len", "(", "key", ")", "==", "64", "return", "key" ]
Used for getting unknown input type into a private key. For example, if you ask a user to input a private key, and they may input hex, WIF, integer, etc. Run it through this function to get a standardized format. Function either outputs private key hex string or raises an exception. It's really going to try to make any input into a private key, so make sure that whatever you import is indeed supposed to be a private key. For example, if you put an int in, it will turn that into a key. Make sure you want a key when you use this function!!!
[ "Used", "for", "getting", "unknown", "input", "type", "into", "a", "private", "key", "." ]
ad332433dfcc067e86d2e77fa0c8f1a27daffb63
https://github.com/maxweisspoker/simplebitcoinfuncs/blob/ad332433dfcc067e86d2e77fa0c8f1a27daffb63/simplebitcoinfuncs/bitcoin.py#L245-L287
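A sketch chaining the three key helpers above. The public key used is the secp256k1 generator point in compressed form; the private scalar 1 is chosen so the tweaked key equals the input, which makes the round trip easy to eyeball. Module path taken from the `path` column.

```python
from simplebitcoinfuncs.bitcoin import multiplypub, privtohex, validatepubkey

priv = privtohex(1)    # normalizes int/WIF/hex input into 64-char hex ('000...001')

# Compressed secp256k1 generator point G.
pub = validatepubkey(
    '0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798')

if pub:
    tweaked = multiplypub(pub, priv, outcompressed=True)
    print(tweaked)     # multiplying by 1 returns the same compressed key
```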
243,117
olsoneric/pedemath
pedemath/quat.py
conjugate_quat
def conjugate_quat(quat): """Negate the vector part of the quaternion.""" return Quat(-quat.x, -quat.y, -quat.z, quat.w)
python
def conjugate_quat(quat): """Negate the vector part of the quaternion.""" return Quat(-quat.x, -quat.y, -quat.z, quat.w)
[ "def", "conjugate_quat", "(", "quat", ")", ":", "return", "Quat", "(", "-", "quat", ".", "x", ",", "-", "quat", ".", "y", ",", "-", "quat", ".", "z", ",", "quat", ".", "w", ")" ]
Negate the vector part of the quaternion.
[ "Negate", "the", "vector", "part", "of", "the", "quaternion", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/quat.py#L17-L19
243,118
olsoneric/pedemath
pedemath/quat.py
lerp_quat
def lerp_quat(from_quat, to_quat, percent): """Return linear interpolation of two quaternions.""" # Check if signs need to be reversed. if dot_quat(from_quat, to_quat) < 0.0: to_sign = -1 else: to_sign = 1 # Simple linear interpolation percent_from = 1.0 - percent percent_to = percent result = Quat( percent_from * from_quat.x + to_sign * percent_to * to_quat.x, percent_from * from_quat.y + to_sign * percent_to * to_quat.y, percent_from * from_quat.z + to_sign * percent_to * to_quat.z, percent_from * from_quat.w + to_sign * percent_to * to_quat.w) return result
python
def lerp_quat(from_quat, to_quat, percent): """Return linear interpolation of two quaternions.""" # Check if signs need to be reversed. if dot_quat(from_quat, to_quat) < 0.0: to_sign = -1 else: to_sign = 1 # Simple linear interpolation percent_from = 1.0 - percent percent_to = percent result = Quat( percent_from * from_quat.x + to_sign * percent_to * to_quat.x, percent_from * from_quat.y + to_sign * percent_to * to_quat.y, percent_from * from_quat.z + to_sign * percent_to * to_quat.z, percent_from * from_quat.w + to_sign * percent_to * to_quat.w) return result
[ "def", "lerp_quat", "(", "from_quat", ",", "to_quat", ",", "percent", ")", ":", "# Check if signs need to be reversed.", "if", "dot_quat", "(", "from_quat", ",", "to_quat", ")", "<", "0.0", ":", "to_sign", "=", "-", "1", "else", ":", "to_sign", "=", "1", "# Simple linear interpolation", "percent_from", "=", "1.0", "-", "percent", "percent_to", "=", "percent", "result", "=", "Quat", "(", "percent_from", "*", "from_quat", ".", "x", "+", "to_sign", "*", "percent_to", "*", "to_quat", ".", "x", ",", "percent_from", "*", "from_quat", ".", "y", "+", "to_sign", "*", "percent_to", "*", "to_quat", ".", "y", ",", "percent_from", "*", "from_quat", ".", "z", "+", "to_sign", "*", "percent_to", "*", "to_quat", ".", "z", ",", "percent_from", "*", "from_quat", ".", "w", "+", "to_sign", "*", "percent_to", "*", "to_quat", ".", "w", ")", "return", "result" ]
Return linear interpolation of two quaternions.
[ "Return", "linear", "interpolation", "of", "two", "quaternions", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/quat.py#L27-L46
243,119
olsoneric/pedemath
pedemath/quat.py
nlerp_quat
def nlerp_quat(from_quat, to_quat, percent): """Return normalized linear interpolation of two quaternions. Less computationally expensive than slerp (which not implemented in this lib yet), but does not maintain a constant velocity like slerp. """ result = lerp_quat(from_quat, to_quat, percent) result.normalize() return result
python
def nlerp_quat(from_quat, to_quat, percent): """Return normalized linear interpolation of two quaternions. Less computationally expensive than slerp (which not implemented in this lib yet), but does not maintain a constant velocity like slerp. """ result = lerp_quat(from_quat, to_quat, percent) result.normalize() return result
[ "def", "nlerp_quat", "(", "from_quat", ",", "to_quat", ",", "percent", ")", ":", "result", "=", "lerp_quat", "(", "from_quat", ",", "to_quat", ",", "percent", ")", "result", ".", "normalize", "(", ")", "return", "result" ]
Return normalized linear interpolation of two quaternions. Less computationally expensive than slerp (which not implemented in this lib yet), but does not maintain a constant velocity like slerp.
[ "Return", "normalized", "linear", "interpolation", "of", "two", "quaternions", "." ]
4bffcfe7089e421d603eb0a9708b84789c2d16be
https://github.com/olsoneric/pedemath/blob/4bffcfe7089e421d603eb0a9708b84789c2d16be/pedemath/quat.py#L49-L58
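A standalone sketch of the lerp/nlerp math in the three rows above, using plain (x, y, z, w) tuples since the Quat class itself is not included in this dump:

```python
import math

def dot(q1, q2):
    return sum(a * b for a, b in zip(q1, q2))

def nlerp(q_from, q_to, t):
    # Flip the target's sign on opposite hemispheres, as lerp_quat() does,
    # then interpolate component-wise and renormalize.
    sign = -1.0 if dot(q_from, q_to) < 0.0 else 1.0
    mixed = tuple((1.0 - t) * a + sign * t * b for a, b in zip(q_from, q_to))
    norm = math.sqrt(sum(c * c for c in mixed))
    return tuple(c / norm for c in mixed)

identity = (0.0, 0.0, 0.0, 1.0)
rot_z_90 = (0.0, 0.0, math.sin(math.pi / 4), math.cos(math.pi / 4))
print(nlerp(identity, rot_z_90, 0.5))   # roughly a 45-degree rotation about Z
```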
243,120
PRIArobotics/HedgehogUtils
hedgehog/utils/asyncio.py
repeat_func
def repeat_func(func: Callable[[], Union[T, Awaitable[T]]], times: int=None, *, interval: float=0) -> AsyncIterator[T]: """ Repeats the result of a 0-ary function either indefinitely, or for a defined number of times. `times` and `interval` behave exactly like with `aiostream.create.repeat`. A useful idiom is to combine an indefinite `repeat_func` stream with `aiostream.select.takewhile` to terminate the stream at some point. """ base = stream.repeat.raw((), times, interval=interval) return cast(AsyncIterator[T], stream.starmap.raw(base, func, task_limit=1))
python
def repeat_func(func: Callable[[], Union[T, Awaitable[T]]], times: int=None, *, interval: float=0) -> AsyncIterator[T]: """ Repeats the result of a 0-ary function either indefinitely, or for a defined number of times. `times` and `interval` behave exactly like with `aiostream.create.repeat`. A useful idiom is to combine an indefinite `repeat_func` stream with `aiostream.select.takewhile` to terminate the stream at some point. """ base = stream.repeat.raw((), times, interval=interval) return cast(AsyncIterator[T], stream.starmap.raw(base, func, task_limit=1))
[ "def", "repeat_func", "(", "func", ":", "Callable", "[", "[", "]", ",", "Union", "[", "T", ",", "Awaitable", "[", "T", "]", "]", "]", ",", "times", ":", "int", "=", "None", ",", "*", ",", "interval", ":", "float", "=", "0", ")", "->", "AsyncIterator", "[", "T", "]", ":", "base", "=", "stream", ".", "repeat", ".", "raw", "(", "(", ")", ",", "times", ",", "interval", "=", "interval", ")", "return", "cast", "(", "AsyncIterator", "[", "T", "]", ",", "stream", ".", "starmap", ".", "raw", "(", "base", ",", "func", ",", "task_limit", "=", "1", ")", ")" ]
Repeats the result of a 0-ary function either indefinitely, or for a defined number of times. `times` and `interval` behave exactly like with `aiostream.create.repeat`. A useful idiom is to combine an indefinite `repeat_func` stream with `aiostream.select.takewhile` to terminate the stream at some point.
[ "Repeats", "the", "result", "of", "a", "0", "-", "ary", "function", "either", "indefinitely", "or", "for", "a", "defined", "number", "of", "times", ".", "times", "and", "interval", "behave", "exactly", "like", "with", "aiostream", ".", "create", ".", "repeat", "." ]
cc368df270288c870cc66d707696ccb62823ca9c
https://github.com/PRIArobotics/HedgehogUtils/blob/cc368df270288c870cc66d707696ccb62823ca9c/hedgehog/utils/asyncio.py#L15-L24
243,121
PRIArobotics/HedgehogUtils
hedgehog/utils/asyncio.py
repeat_func_eof
def repeat_func_eof(func: Callable[[], Union[T, Awaitable[T]]], eof: Any, *, interval: float=0, use_is: bool=False) -> AsyncIterator[T]: """ Repeats the result of a 0-ary function until an `eof` item is reached. The `eof` item itself is not part of the resulting stream; by setting `use_is` to true, eof is checked by identity rather than equality. `times` and `interval` behave exactly like with `aiostream.create.repeat`. """ pred = (lambda item: item != eof) if not use_is else (lambda item: (item is not eof)) base = repeat_func.raw(func, interval=interval) return cast(AsyncIterator[T], stream.takewhile.raw(base, pred))
python
def repeat_func_eof(func: Callable[[], Union[T, Awaitable[T]]], eof: Any, *, interval: float=0, use_is: bool=False) -> AsyncIterator[T]: """ Repeats the result of a 0-ary function until an `eof` item is reached. The `eof` item itself is not part of the resulting stream; by setting `use_is` to true, eof is checked by identity rather than equality. `times` and `interval` behave exactly like with `aiostream.create.repeat`. """ pred = (lambda item: item != eof) if not use_is else (lambda item: (item is not eof)) base = repeat_func.raw(func, interval=interval) return cast(AsyncIterator[T], stream.takewhile.raw(base, pred))
[ "def", "repeat_func_eof", "(", "func", ":", "Callable", "[", "[", "]", ",", "Union", "[", "T", ",", "Awaitable", "[", "T", "]", "]", "]", ",", "eof", ":", "Any", ",", "*", ",", "interval", ":", "float", "=", "0", ",", "use_is", ":", "bool", "=", "False", ")", "->", "AsyncIterator", "[", "T", "]", ":", "pred", "=", "(", "lambda", "item", ":", "item", "!=", "eof", ")", "if", "not", "use_is", "else", "(", "lambda", "item", ":", "(", "item", "is", "not", "eof", ")", ")", "base", "=", "repeat_func", ".", "raw", "(", "func", ",", "interval", "=", "interval", ")", "return", "cast", "(", "AsyncIterator", "[", "T", "]", ",", "stream", ".", "takewhile", ".", "raw", "(", "base", ",", "pred", ")", ")" ]
Repeats the result of a 0-ary function until an `eof` item is reached. The `eof` item itself is not part of the resulting stream; by setting `use_is` to true, eof is checked by identity rather than equality. `times` and `interval` behave exactly like with `aiostream.create.repeat`.
[ "Repeats", "the", "result", "of", "a", "0", "-", "ary", "function", "until", "an", "eof", "item", "is", "reached", ".", "The", "eof", "item", "itself", "is", "not", "part", "of", "the", "resulting", "stream", ";", "by", "setting", "use_is", "to", "true", "eof", "is", "checked", "by", "identity", "rather", "than", "equality", ".", "times", "and", "interval", "behave", "exactly", "like", "with", "aiostream", ".", "create", ".", "repeat", "." ]
cc368df270288c870cc66d707696ccb62823ca9c
https://github.com/PRIArobotics/HedgehogUtils/blob/cc368df270288c870cc66d707696ccb62823ca9c/hedgehog/utils/asyncio.py#L28-L37
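A standalone sketch of the idiom the two helpers above bundle up (repeat a 0-ary callable, stop on a sentinel), written directly against aiostream so it does not depend on how repeat_func is decorated in the package; the queue and None sentinel are illustrative.

```python
import asyncio
from aiostream import pipe, stream

async def main():
    queue = asyncio.Queue()
    for item in ("a", "b", None):               # None marks end-of-stream
        queue.put_nowait(item)

    xs = (
        stream.repeat((), interval=0)            # yield a dummy () indefinitely
        | pipe.starmap(queue.get, task_limit=1)  # call queue.get() per tick
        | pipe.takewhile(lambda item: item is not None)
    )
    async with xs.stream() as streamer:
        async for item in streamer:
            print(item)                          # prints "a" then "b"

asyncio.run(main())
```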
243,122
davidmiller/letter
letter/__init__.py
_stringlist
def _stringlist(*args): """ Take a lists of strings or strings and flatten these into a list of strings. Arguments: - `*args`: "" or [""...] Return: [""...] Exceptions: None """ return list(itertools.chain.from_iterable(itertools.repeat(x,1) if stringy(x) else x for x in args if x))
python
def _stringlist(*args): """ Take a lists of strings or strings and flatten these into a list of strings. Arguments: - `*args`: "" or [""...] Return: [""...] Exceptions: None """ return list(itertools.chain.from_iterable(itertools.repeat(x,1) if stringy(x) else x for x in args if x))
[ "def", "_stringlist", "(", "*", "args", ")", ":", "return", "list", "(", "itertools", ".", "chain", ".", "from_iterable", "(", "itertools", ".", "repeat", "(", "x", ",", "1", ")", "if", "stringy", "(", "x", ")", "else", "x", "for", "x", "in", "args", "if", "x", ")", ")" ]
Take a lists of strings or strings and flatten these into a list of strings. Arguments: - `*args`: "" or [""...] Return: [""...] Exceptions: None
[ "Take", "a", "lists", "of", "strings", "or", "strings", "and", "flatten", "these", "into", "a", "list", "of", "strings", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L51-L62
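A standalone sketch of what _stringlist() above does; stringy() is not part of this dump, so isinstance(x, str) stands in for it here.

```python
import itertools

def stringlist(*args):
    # Wrap bare strings so strings and lists of strings flatten uniformly;
    # falsy arguments are dropped, as in _stringlist().
    return list(itertools.chain.from_iterable(
        [x] if isinstance(x, str) else x
        for x in args if x
    ))

print(stringlist("a", ["b", "c"], None, "d"))   # ['a', 'b', 'c', 'd']
```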
243,123
davidmiller/letter
letter/__init__.py
_parse_outgoing_mail
def _parse_outgoing_mail(sender, to, msgstring): """ Parse an outgoing mail and put it into the OUTBOX. Arguments: - `sender`: str - `to`: str - `msgstring`: str Return: None Exceptions: None """ global OUTBOX OUTBOX.append(email.message_from_string(msgstring)) return
python
def _parse_outgoing_mail(sender, to, msgstring): """ Parse an outgoing mail and put it into the OUTBOX. Arguments: - `sender`: str - `to`: str - `msgstring`: str Return: None Exceptions: None """ global OUTBOX OUTBOX.append(email.message_from_string(msgstring)) return
[ "def", "_parse_outgoing_mail", "(", "sender", ",", "to", ",", "msgstring", ")", ":", "global", "OUTBOX", "OUTBOX", ".", "append", "(", "email", ".", "message_from_string", "(", "msgstring", ")", ")", "return" ]
Parse an outgoing mail and put it into the OUTBOX. Arguments: - `sender`: str - `to`: str - `msgstring`: str Return: None Exceptions: None
[ "Parse", "an", "outgoing", "mail", "and", "put", "it", "into", "the", "OUTBOX", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L581-L596
243,124
davidmiller/letter
letter/__init__.py
Attachment.as_msg
def as_msg(self): """ Convert ourself to be a message part of the appropriate MIME type. Return: MIMEBase Exceptions: None """ # Based upon http://docs.python.org/2/library/email-examples.html # with minimal tweaking # Guess the content type based on the file's extension. Encoding # will be ignored, although we should check for simple things like # gzip'd or compressed files. ctype, encoding = mimetypes.guess_type(str(self.path)) if ctype is None or encoding is not None: # No guess could be made, or the file is encoded (compressed), so # use a generic bag-of-bits type. ctype = 'application/octet-stream' maintype, subtype = ctype.split('/', 1) if maintype == 'text': # Note: we should handle calculating the charset msg = MIMEText(self.path.read(), _subtype=subtype) elif maintype == 'image': fp = self.path.open('rb') msg = MIMEImage(fp.read(), _subtype=subtype) fp.close() elif maintype == 'audio': fp = self.path.open('rb') msg = MIMEAudio(fp.read(), _subtype=subtype) fp.close() else: fp = self.path.open('rb') msg = MIMEBase(maintype, subtype) msg.set_payload(fp.read()) fp.close() # Encode the payload using Base64 encoders.encode_base64(msg) filename = str(self.path[-1]) msg.add_header('Content-Disposition', 'attachment', filename=filename) return msg
python
def as_msg(self): """ Convert ourself to be a message part of the appropriate MIME type. Return: MIMEBase Exceptions: None """ # Based upon http://docs.python.org/2/library/email-examples.html # with minimal tweaking # Guess the content type based on the file's extension. Encoding # will be ignored, although we should check for simple things like # gzip'd or compressed files. ctype, encoding = mimetypes.guess_type(str(self.path)) if ctype is None or encoding is not None: # No guess could be made, or the file is encoded (compressed), so # use a generic bag-of-bits type. ctype = 'application/octet-stream' maintype, subtype = ctype.split('/', 1) if maintype == 'text': # Note: we should handle calculating the charset msg = MIMEText(self.path.read(), _subtype=subtype) elif maintype == 'image': fp = self.path.open('rb') msg = MIMEImage(fp.read(), _subtype=subtype) fp.close() elif maintype == 'audio': fp = self.path.open('rb') msg = MIMEAudio(fp.read(), _subtype=subtype) fp.close() else: fp = self.path.open('rb') msg = MIMEBase(maintype, subtype) msg.set_payload(fp.read()) fp.close() # Encode the payload using Base64 encoders.encode_base64(msg) filename = str(self.path[-1]) msg.add_header('Content-Disposition', 'attachment', filename=filename) return msg
[ "def", "as_msg", "(", "self", ")", ":", "# Based upon http://docs.python.org/2/library/email-examples.html", "# with minimal tweaking", "# Guess the content type based on the file's extension. Encoding", "# will be ignored, although we should check for simple things like", "# gzip'd or compressed files.", "ctype", ",", "encoding", "=", "mimetypes", ".", "guess_type", "(", "str", "(", "self", ".", "path", ")", ")", "if", "ctype", "is", "None", "or", "encoding", "is", "not", "None", ":", "# No guess could be made, or the file is encoded (compressed), so", "# use a generic bag-of-bits type.", "ctype", "=", "'application/octet-stream'", "maintype", ",", "subtype", "=", "ctype", ".", "split", "(", "'/'", ",", "1", ")", "if", "maintype", "==", "'text'", ":", "# Note: we should handle calculating the charset", "msg", "=", "MIMEText", "(", "self", ".", "path", ".", "read", "(", ")", ",", "_subtype", "=", "subtype", ")", "elif", "maintype", "==", "'image'", ":", "fp", "=", "self", ".", "path", ".", "open", "(", "'rb'", ")", "msg", "=", "MIMEImage", "(", "fp", ".", "read", "(", ")", ",", "_subtype", "=", "subtype", ")", "fp", ".", "close", "(", ")", "elif", "maintype", "==", "'audio'", ":", "fp", "=", "self", ".", "path", ".", "open", "(", "'rb'", ")", "msg", "=", "MIMEAudio", "(", "fp", ".", "read", "(", ")", ",", "_subtype", "=", "subtype", ")", "fp", ".", "close", "(", ")", "else", ":", "fp", "=", "self", ".", "path", ".", "open", "(", "'rb'", ")", "msg", "=", "MIMEBase", "(", "maintype", ",", "subtype", ")", "msg", ".", "set_payload", "(", "fp", ".", "read", "(", ")", ")", "fp", ".", "close", "(", ")", "# Encode the payload using Base64", "encoders", ".", "encode_base64", "(", "msg", ")", "filename", "=", "str", "(", "self", ".", "path", "[", "-", "1", "]", ")", "msg", ".", "add_header", "(", "'Content-Disposition'", ",", "'attachment'", ",", "filename", "=", "filename", ")", "return", "msg" ]
Convert ourself to be a message part of the appropriate MIME type. Return: MIMEBase Exceptions: None
[ "Convert", "ourself", "to", "be", "a", "message", "part", "of", "the", "appropriate", "MIME", "type", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L72-L113
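A standalone sketch of the attachment logic in as_msg() above, covering the text and generic branches with only the standard library and a plain filesystem path in place of the project's Path object:

```python
import mimetypes
import os
from email import encoders
from email.mime.base import MIMEBase
from email.mime.text import MIMEText

def attachment_part(path):
    # Guess the MIME type from the extension; unknown or compressed files are
    # sent as a generic bag of bits, as in as_msg().
    ctype, encoding = mimetypes.guess_type(path)
    if ctype is None or encoding is not None:
        ctype = 'application/octet-stream'
    maintype, subtype = ctype.split('/', 1)

    if maintype == 'text':
        with open(path) as fp:
            part = MIMEText(fp.read(), _subtype=subtype)
    else:
        with open(path, 'rb') as fp:
            part = MIMEBase(maintype, subtype)
            part.set_payload(fp.read())
        encoders.encode_base64(part)           # base64 for non-text payloads

    part.add_header('Content-Disposition', 'attachment',
                    filename=os.path.basename(path))
    return part
```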
243,125
davidmiller/letter
letter/__init__.py
BaseMailer.tolist
def tolist(self, to): """ Make sure that our addressees are a unicoded list Arguments: - `to`: str or list Return: [u, ...] Exceptions: None """ return ', '.join(isinstance(to, list) and [u(x) for x in to] or [u(to)])
python
def tolist(self, to): """ Make sure that our addressees are a unicoded list Arguments: - `to`: str or list Return: [u, ...] Exceptions: None """ return ', '.join(isinstance(to, list) and [u(x) for x in to] or [u(to)])
[ "def", "tolist", "(", "self", ",", "to", ")", ":", "return", "', '", ".", "join", "(", "isinstance", "(", "to", ",", "list", ")", "and", "[", "u", "(", "x", ")", "for", "x", "in", "to", "]", "or", "[", "u", "(", "to", ")", "]", ")" ]
Make sure that our addressees are a unicoded list Arguments: - `to`: str or list Return: [u, ...] Exceptions: None
[ "Make", "sure", "that", "our", "addressees", "are", "a", "unicoded", "list" ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L122-L132
243,126
davidmiller/letter
letter/__init__.py
BaseMailer.sanity_check
def sanity_check(self, sender, to, subject, plain=None, html=None, cc=None, bcc=None): """ Sanity check the message. If we have PLAIN and HTML versions, send a multipart alternative MIME message, else send whichever we do have. If we have neither, raise NoContentError Arguments: - `sender`: str - `to`: list - `subject`: str - `plain`: str - `html`: str Return: None Exceptions: NoContentError """ if not plain and not html: raise NoContentError()
python
def sanity_check(self, sender, to, subject, plain=None, html=None, cc=None, bcc=None): """ Sanity check the message. If we have PLAIN and HTML versions, send a multipart alternative MIME message, else send whichever we do have. If we have neither, raise NoContentError Arguments: - `sender`: str - `to`: list - `subject`: str - `plain`: str - `html`: str Return: None Exceptions: NoContentError """ if not plain and not html: raise NoContentError()
[ "def", "sanity_check", "(", "self", ",", "sender", ",", "to", ",", "subject", ",", "plain", "=", "None", ",", "html", "=", "None", ",", "cc", "=", "None", ",", "bcc", "=", "None", ")", ":", "if", "not", "plain", "and", "not", "html", ":", "raise", "NoContentError", "(", ")" ]
Sanity check the message. If we have PLAIN and HTML versions, send a multipart alternative MIME message, else send whichever we do have. If we have neither, raise NoContentError Arguments: - `sender`: str - `to`: list - `subject`: str - `plain`: str - `html`: str Return: None Exceptions: NoContentError
[ "Sanity", "check", "the", "message", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L134-L154
243,127
davidmiller/letter
letter/__init__.py
BasePostman._find_tpl
def _find_tpl(self, name, extension='.jinja2'): """ Return a Path object representing the Template we're after, searching SELF.tpls or None Arguments: - `name`: str Return: Path or None Exceptions: None """ found = None for loc in self.tpls: if not loc: continue contents = [f for f in loc.ls() if f.find(name) != -1 and f.endswith(extension)] if contents: found = contents[0] break exact = loc + (name + extension) if exact.is_file: found = exact return found
python
def _find_tpl(self, name, extension='.jinja2'): """ Return a Path object representing the Template we're after, searching SELF.tpls or None Arguments: - `name`: str Return: Path or None Exceptions: None """ found = None for loc in self.tpls: if not loc: continue contents = [f for f in loc.ls() if f.find(name) != -1 and f.endswith(extension)] if contents: found = contents[0] break exact = loc + (name + extension) if exact.is_file: found = exact return found
[ "def", "_find_tpl", "(", "self", ",", "name", ",", "extension", "=", "'.jinja2'", ")", ":", "found", "=", "None", "for", "loc", "in", "self", ".", "tpls", ":", "if", "not", "loc", ":", "continue", "contents", "=", "[", "f", "for", "f", "in", "loc", ".", "ls", "(", ")", "if", "f", ".", "find", "(", "name", ")", "!=", "-", "1", "and", "f", ".", "endswith", "(", "extension", ")", "]", "if", "contents", ":", "found", "=", "contents", "[", "0", "]", "break", "exact", "=", "loc", "+", "(", "name", "+", "extension", ")", "if", "exact", ".", "is_file", ":", "found", "=", "exact", "return", "found" ]
Return a Path object representing the Template we're after, searching SELF.tpls or None Arguments: - `name`: str Return: Path or None Exceptions: None
[ "Return", "a", "Path", "object", "representing", "the", "Template", "we", "re", "after", "searching", "SELF", ".", "tpls", "or", "None" ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L354-L376
243,128
davidmiller/letter
letter/__init__.py
BasePostman._find_tpls
def _find_tpls(self, name): """ Return plain, html templates for NAME Arguments: - `name`: str Return: tuple Exceptions: None """ return self._find_tpl(name, extension='.txt'), self._find_tpl(name, extension='.html')
python
def _find_tpls(self, name): """ Return plain, html templates for NAME Arguments: - `name`: str Return: tuple Exceptions: None """ return self._find_tpl(name, extension='.txt'), self._find_tpl(name, extension='.html')
[ "def", "_find_tpls", "(", "self", ",", "name", ")", ":", "return", "self", ".", "_find_tpl", "(", "name", ",", "extension", "=", "'.txt'", ")", ",", "self", ".", "_find_tpl", "(", "name", ",", "extension", "=", "'.html'", ")" ]
Return plain, html templates for NAME Arguments: - `name`: str Return: tuple Exceptions: None
[ "Return", "plain", "html", "templates", "for", "NAME" ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L378-L388
243,129
davidmiller/letter
letter/__init__.py
BasePostman._sendtpl
def _sendtpl(self, sender, to, subject, cc=None, bcc=None, attach=None, replyto=None, **kwargs): """ Send a Letter from SENDER to TO, with the subject SUBJECT. Use the current template, with KWARGS as the context. Arguments: - `sender`: unicode - `to`: unicode - `subject`: unicode - `cc`: str or [str] - `bcc`: str or [str] - `replyto`: str - `**kwargs`: objects Return: None Exceptions: None """ plain, html = self.body(**kwargs) self.mailer.send(sender, to, subject, plain=plain, html=html, cc=cc, bcc=bcc, replyto=replyto, attach=attach) return
python
def _sendtpl(self, sender, to, subject, cc=None, bcc=None, attach=None, replyto=None, **kwargs): """ Send a Letter from SENDER to TO, with the subject SUBJECT. Use the current template, with KWARGS as the context. Arguments: - `sender`: unicode - `to`: unicode - `subject`: unicode - `cc`: str or [str] - `bcc`: str or [str] - `replyto`: str - `**kwargs`: objects Return: None Exceptions: None """ plain, html = self.body(**kwargs) self.mailer.send(sender, to, subject, plain=plain, html=html, cc=cc, bcc=bcc, replyto=replyto, attach=attach) return
[ "def", "_sendtpl", "(", "self", ",", "sender", ",", "to", ",", "subject", ",", "cc", "=", "None", ",", "bcc", "=", "None", ",", "attach", "=", "None", ",", "replyto", "=", "None", ",", "*", "*", "kwargs", ")", ":", "plain", ",", "html", "=", "self", ".", "body", "(", "*", "*", "kwargs", ")", "self", ".", "mailer", ".", "send", "(", "sender", ",", "to", ",", "subject", ",", "plain", "=", "plain", ",", "html", "=", "html", ",", "cc", "=", "cc", ",", "bcc", "=", "bcc", ",", "replyto", "=", "replyto", ",", "attach", "=", "attach", ")", "return" ]
Send a Letter from SENDER to TO, with the subject SUBJECT. Use the current template, with KWARGS as the context. Arguments: - `sender`: unicode - `to`: unicode - `subject`: unicode - `cc`: str or [str] - `bcc`: str or [str] - `replyto`: str - `**kwargs`: objects Return: None Exceptions: None
[ "Send", "a", "Letter", "from", "SENDER", "to", "TO", "with", "the", "subject", "SUBJECT", ".", "Use", "the", "current", "template", "with", "KWARGS", "as", "the", "context", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L411-L431
243,130
davidmiller/letter
letter/__init__.py
BasePostman.body
def body(self, **kwargs): """ Return the plain and html versions of our contents. Return: tuple Exceptions: None """ text_content, html_content = None, None if self.plain: text_content = mold.cast(self.plain, **kwargs) if self.html: html_content = mold.cast(self.html, **kwargs) return text_content, html_content
python
def body(self, **kwargs): """ Return the plain and html versions of our contents. Return: tuple Exceptions: None """ text_content, html_content = None, None if self.plain: text_content = mold.cast(self.plain, **kwargs) if self.html: html_content = mold.cast(self.html, **kwargs) return text_content, html_content
[ "def", "body", "(", "self", ",", "*", "*", "kwargs", ")", ":", "text_content", ",", "html_content", "=", "None", ",", "None", "if", "self", ".", "plain", ":", "text_content", "=", "mold", ".", "cast", "(", "self", ".", "plain", ",", "*", "*", "kwargs", ")", "if", "self", ".", "html", ":", "html_content", "=", "mold", ".", "cast", "(", "self", ".", "html", ",", "*", "*", "kwargs", ")", "return", "text_content", ",", "html_content" ]
Return the plain and html versions of our contents. Return: tuple Exceptions: None
[ "Return", "the", "plain", "and", "html", "versions", "of", "our", "contents", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L433-L445
243,131
davidmiller/letter
letter/__init__.py
BasePostman.template
def template(self, name): """ Set an active template to use with our Postman. This changes the call signature of send. Arguments: - `name`: str Return: None Exceptions: None """ self.plain, self.html = self._find_tpls(name) if not self.plain: self.plain = self._find_tpl(name) try: self.send = self._sendtpl yield finally: self.plain, self.html = None, None self.send = self._send
python
def template(self, name): """ Set an active template to use with our Postman. This changes the call signature of send. Arguments: - `name`: str Return: None Exceptions: None """ self.plain, self.html = self._find_tpls(name) if not self.plain: self.plain = self._find_tpl(name) try: self.send = self._sendtpl yield finally: self.plain, self.html = None, None self.send = self._send
[ "def", "template", "(", "self", ",", "name", ")", ":", "self", ".", "plain", ",", "self", ".", "html", "=", "self", ".", "_find_tpls", "(", "name", ")", "if", "not", "self", ".", "plain", ":", "self", ".", "plain", "=", "self", ".", "_find_tpl", "(", "name", ")", "try", ":", "self", ".", "send", "=", "self", ".", "_sendtpl", "yield", "finally", ":", "self", ".", "plain", ",", "self", ".", "html", "=", "None", ",", "None", "self", ".", "send", "=", "self", ".", "_send" ]
Set an active template to use with our Postman. This changes the call signature of send. Arguments: - `name`: str Return: None Exceptions: None
[ "Set", "an", "active", "template", "to", "use", "with", "our", "Postman", "." ]
c0c66ae2c6a792106e9a8374a01421817c8a8ae0
https://github.com/davidmiller/letter/blob/c0c66ae2c6a792106e9a8374a01421817c8a8ae0/letter/__init__.py#L448-L468
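The yield inside try/finally in BasePostman.template above is the usual contextlib.contextmanager pattern for temporarily swapping an attribute and guaranteeing its restoration. A minimal, self-contained illustration of that pattern with invented class and method names (not letter's real API):

from contextlib import contextmanager

class Sender:
    def send(self, msg):
        print('plain send:', msg)

    def _send_fancy(self, msg):
        print('fancy send:', msg)

    @contextmanager
    def fancy(self):
        # Rebind .send for the duration of the with-block, restoring it
        # afterwards even if the body raises.
        original = self.send
        self.send = self._send_fancy
        try:
            yield self
        finally:
            self.send = original

s = Sender()
with s.fancy():
    s.send('hello')   # routed through _send_fancy
s.send('hello')       # back to the plain send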
243,132
diffeo/rejester
rejester/_task_master.py
Worker.environment
def environment(self): '''Get raw data about this worker. This is recorded in the :meth:`heartbeat` info, and can be retrieved by :meth:`TaskMaster.get_heartbeat`. The dictionary includes keys ``worker_id``, ``host``, ``fqdn``, ``version``, ``working_set``, and ``memory``. ''' hostname = socket.gethostname() aliases = () ipaddrs = () # This sequence isn't 100% reliable. We might try a socket() # sequence like RedisBase._ipaddress(), or just decide that # socket.fqdn() and/or socket.gethostname() is good enough. try: ip = socket.gethostbyname(hostname) except socket.herror: # If you're here, then $(hostname) doesn't resolve. ip = None try: if ip is not None: hostname, aliases, ipaddrs = socket.gethostbyaddr(ip) except socket.herror: # If you're here, then $(hostname) resolves, but the IP # address that results in doesn't reverse-resolve. This # has been observed on OSX at least. ipaddrs = (ip,) env = dict( worker_id=self.worker_id, parent=self.parent, hostname=hostname, aliases=tuple(aliases), ipaddrs=tuple(ipaddrs), fqdn=socket.getfqdn(), version=pkg_resources.get_distribution("rejester").version, # pylint: disable=E1103 working_set=[(dist.key, dist.version) for dist in pkg_resources.WorkingSet()], # pylint: disable=E1103 # config_hash=self.config['config_hash'], # config_json = self.config['config_json'], memory=psutil.virtual_memory(), pid=os.getpid(), ) return env
python
def environment(self): '''Get raw data about this worker. This is recorded in the :meth:`heartbeat` info, and can be retrieved by :meth:`TaskMaster.get_heartbeat`. The dictionary includes keys ``worker_id``, ``host``, ``fqdn``, ``version``, ``working_set``, and ``memory``. ''' hostname = socket.gethostname() aliases = () ipaddrs = () # This sequence isn't 100% reliable. We might try a socket() # sequence like RedisBase._ipaddress(), or just decide that # socket.fqdn() and/or socket.gethostname() is good enough. try: ip = socket.gethostbyname(hostname) except socket.herror: # If you're here, then $(hostname) doesn't resolve. ip = None try: if ip is not None: hostname, aliases, ipaddrs = socket.gethostbyaddr(ip) except socket.herror: # If you're here, then $(hostname) resolves, but the IP # address that results in doesn't reverse-resolve. This # has been observed on OSX at least. ipaddrs = (ip,) env = dict( worker_id=self.worker_id, parent=self.parent, hostname=hostname, aliases=tuple(aliases), ipaddrs=tuple(ipaddrs), fqdn=socket.getfqdn(), version=pkg_resources.get_distribution("rejester").version, # pylint: disable=E1103 working_set=[(dist.key, dist.version) for dist in pkg_resources.WorkingSet()], # pylint: disable=E1103 # config_hash=self.config['config_hash'], # config_json = self.config['config_json'], memory=psutil.virtual_memory(), pid=os.getpid(), ) return env
[ "def", "environment", "(", "self", ")", ":", "hostname", "=", "socket", ".", "gethostname", "(", ")", "aliases", "=", "(", ")", "ipaddrs", "=", "(", ")", "# This sequence isn't 100% reliable. We might try a socket()", "# sequence like RedisBase._ipaddress(), or just decide that", "# socket.fqdn() and/or socket.gethostname() is good enough.", "try", ":", "ip", "=", "socket", ".", "gethostbyname", "(", "hostname", ")", "except", "socket", ".", "herror", ":", "# If you're here, then $(hostname) doesn't resolve.", "ip", "=", "None", "try", ":", "if", "ip", "is", "not", "None", ":", "hostname", ",", "aliases", ",", "ipaddrs", "=", "socket", ".", "gethostbyaddr", "(", "ip", ")", "except", "socket", ".", "herror", ":", "# If you're here, then $(hostname) resolves, but the IP", "# address that results in doesn't reverse-resolve. This", "# has been observed on OSX at least.", "ipaddrs", "=", "(", "ip", ",", ")", "env", "=", "dict", "(", "worker_id", "=", "self", ".", "worker_id", ",", "parent", "=", "self", ".", "parent", ",", "hostname", "=", "hostname", ",", "aliases", "=", "tuple", "(", "aliases", ")", ",", "ipaddrs", "=", "tuple", "(", "ipaddrs", ")", ",", "fqdn", "=", "socket", ".", "getfqdn", "(", ")", ",", "version", "=", "pkg_resources", ".", "get_distribution", "(", "\"rejester\"", ")", ".", "version", ",", "# pylint: disable=E1103", "working_set", "=", "[", "(", "dist", ".", "key", ",", "dist", ".", "version", ")", "for", "dist", "in", "pkg_resources", ".", "WorkingSet", "(", ")", "]", ",", "# pylint: disable=E1103", "# config_hash=self.config['config_hash'],", "# config_json = self.config['config_json'],", "memory", "=", "psutil", ".", "virtual_memory", "(", ")", ",", "pid", "=", "os", ".", "getpid", "(", ")", ",", ")", "return", "env" ]
Get raw data about this worker. This is recorded in the :meth:`heartbeat` info, and can be retrieved by :meth:`TaskMaster.get_heartbeat`. The dictionary includes keys ``worker_id``, ``host``, ``fqdn``, ``version``, ``working_set``, and ``memory``.
[ "Get", "raw", "data", "about", "this", "worker", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L107-L152
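The hostname/IP fallback logic in Worker.environment above can be reproduced with the standard library alone; the sketch below is an independent approximation (psutil, pkg_resources and the rejester-specific fields are deliberately left out, and the exception types caught are this sketch's choice):

import os
import socket

def host_environment():
    hostname = socket.gethostname()
    aliases, ipaddrs = (), ()
    try:
        ip = socket.gethostbyname(hostname)   # fails if the hostname does not resolve
    except socket.gaierror:
        ip = None
    if ip is not None:
        try:
            hostname, aliases, ipaddrs = socket.gethostbyaddr(ip)
        except socket.herror:
            ipaddrs = (ip,)                   # the IP does not reverse-resolve
    return {
        'hostname': hostname,
        'aliases': tuple(aliases),
        'ipaddrs': tuple(ipaddrs),
        'fqdn': socket.getfqdn(),
        'pid': os.getpid(),
    }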
243,133
diffeo/rejester
rejester/_task_master.py
Worker.register
def register(self, parent=None): '''Record the availability of this worker and get a unique identifer. This sets :attr:`worker_id` and calls :meth:`heartbeat`. This cannot be called multiple times without calling :meth:`unregister` in between. ''' if self.worker_id: raise ProgrammerError('Worker.register cannot be called again without first calling unregister; it is not idempotent') self.parent = parent self.worker_id = nice_identifier() self.task_master.worker_id = self.worker_id self.heartbeat() return self.worker_id
python
def register(self, parent=None): '''Record the availability of this worker and get a unique identifer. This sets :attr:`worker_id` and calls :meth:`heartbeat`. This cannot be called multiple times without calling :meth:`unregister` in between. ''' if self.worker_id: raise ProgrammerError('Worker.register cannot be called again without first calling unregister; it is not idempotent') self.parent = parent self.worker_id = nice_identifier() self.task_master.worker_id = self.worker_id self.heartbeat() return self.worker_id
[ "def", "register", "(", "self", ",", "parent", "=", "None", ")", ":", "if", "self", ".", "worker_id", ":", "raise", "ProgrammerError", "(", "'Worker.register cannot be called again without first calling unregister; it is not idempotent'", ")", "self", ".", "parent", "=", "parent", "self", ".", "worker_id", "=", "nice_identifier", "(", ")", "self", ".", "task_master", ".", "worker_id", "=", "self", ".", "worker_id", "self", ".", "heartbeat", "(", ")", "return", "self", ".", "worker_id" ]
Record the availability of this worker and get a unique identifer. This sets :attr:`worker_id` and calls :meth:`heartbeat`. This cannot be called multiple times without calling :meth:`unregister` in between.
[ "Record", "the", "availability", "of", "this", "worker", "and", "get", "a", "unique", "identifer", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L154-L168
243,134
diffeo/rejester
rejester/_task_master.py
Worker.unregister
def unregister(self): '''Remove this worker from the list of available workers. This requires the worker to already have been :meth:`register()`. ''' self.task_master.worker_unregister(self.worker_id) self.task_master.worker_id = None self.worker_id = None
python
def unregister(self): '''Remove this worker from the list of available workers. This requires the worker to already have been :meth:`register()`. ''' self.task_master.worker_unregister(self.worker_id) self.task_master.worker_id = None self.worker_id = None
[ "def", "unregister", "(", "self", ")", ":", "self", ".", "task_master", ".", "worker_unregister", "(", "self", ".", "worker_id", ")", "self", ".", "task_master", ".", "worker_id", "=", "None", "self", ".", "worker_id", "=", "None" ]
Remove this worker from the list of available workers. This requires the worker to already have been :meth:`register()`.
[ "Remove", "this", "worker", "from", "the", "list", "of", "available", "workers", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L170-L178
243,135
diffeo/rejester
rejester/_task_master.py
Worker.heartbeat
def heartbeat(self): '''Record the current worker state in the registry. This records the worker's current mode, plus the contents of :meth:`environment`, in the data store for inspection by others. :returns mode: Current mode, as :meth:`TaskMaster.get_mode` ''' mode = self.task_master.get_mode() self.task_master.worker_heartbeat(self.worker_id, mode, self.lifetime, self.environment(), parent=self.parent) return mode
python
def heartbeat(self): '''Record the current worker state in the registry. This records the worker's current mode, plus the contents of :meth:`environment`, in the data store for inspection by others. :returns mode: Current mode, as :meth:`TaskMaster.get_mode` ''' mode = self.task_master.get_mode() self.task_master.worker_heartbeat(self.worker_id, mode, self.lifetime, self.environment(), parent=self.parent) return mode
[ "def", "heartbeat", "(", "self", ")", ":", "mode", "=", "self", ".", "task_master", ".", "get_mode", "(", ")", "self", ".", "task_master", ".", "worker_heartbeat", "(", "self", ".", "worker_id", ",", "mode", ",", "self", ".", "lifetime", ",", "self", ".", "environment", "(", ")", ",", "parent", "=", "self", ".", "parent", ")", "return", "mode" ]
Record the current worker state in the registry. This records the worker's current mode, plus the contents of :meth:`environment`, in the data store for inspection by others. :returns mode: Current mode, as :meth:`TaskMaster.get_mode`
[ "Record", "the", "current", "worker", "state", "in", "the", "registry", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L180-L193
243,136
diffeo/rejester
rejester/_task_master.py
WorkUnit.spec
def spec(self): '''Actual work spec. This is retrieved from the database on first use, and in some cases a worker can be mildly more efficient if it avoids using this. ''' if self._spec_cache is None: self._spec_cache = self.registry.get( WORK_SPECS, self.work_spec_name) return self._spec_cache
python
def spec(self): '''Actual work spec. This is retrieved from the database on first use, and in some cases a worker can be mildly more efficient if it avoids using this. ''' if self._spec_cache is None: self._spec_cache = self.registry.get( WORK_SPECS, self.work_spec_name) return self._spec_cache
[ "def", "spec", "(", "self", ")", ":", "if", "self", ".", "_spec_cache", "is", "None", ":", "self", ".", "_spec_cache", "=", "self", ".", "registry", ".", "get", "(", "WORK_SPECS", ",", "self", ".", "work_spec_name", ")", "return", "self", ".", "_spec_cache" ]
Actual work spec. This is retrieved from the database on first use, and in some cases a worker can be mildly more efficient if it avoids using this.
[ "Actual", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L284-L295
243,137
diffeo/rejester
rejester/_task_master.py
WorkUnit.module
def module(self): '''Python module to run the job. This is used by :func:`run` and the standard worker system. If the work spec contains keys ``module``, ``run_function``, and ``terminate_function``, then this contains the Python module object named as ``module``; otherwise this contains :const:`None`. ''' if self._module_cache is None: funclist = filter(None, (self.spec.get('run_function'), self.spec.get('terminate_function'))) if funclist: try: self._module_cache = __import__( self.spec['module'], globals(), (), funclist, -1) except Exception: logger.error('failed to load spec["module"] = %r', self.spec['module'], exc_info=True) raise return self._module_cache
python
def module(self): '''Python module to run the job. This is used by :func:`run` and the standard worker system. If the work spec contains keys ``module``, ``run_function``, and ``terminate_function``, then this contains the Python module object named as ``module``; otherwise this contains :const:`None`. ''' if self._module_cache is None: funclist = filter(None, (self.spec.get('run_function'), self.spec.get('terminate_function'))) if funclist: try: self._module_cache = __import__( self.spec['module'], globals(), (), funclist, -1) except Exception: logger.error('failed to load spec["module"] = %r', self.spec['module'], exc_info=True) raise return self._module_cache
[ "def", "module", "(", "self", ")", ":", "if", "self", ".", "_module_cache", "is", "None", ":", "funclist", "=", "filter", "(", "None", ",", "(", "self", ".", "spec", ".", "get", "(", "'run_function'", ")", ",", "self", ".", "spec", ".", "get", "(", "'terminate_function'", ")", ")", ")", "if", "funclist", ":", "try", ":", "self", ".", "_module_cache", "=", "__import__", "(", "self", ".", "spec", "[", "'module'", "]", ",", "globals", "(", ")", ",", "(", ")", ",", "funclist", ",", "-", "1", ")", "except", "Exception", ":", "logger", ".", "error", "(", "'failed to load spec[\"module\"] = %r'", ",", "self", ".", "spec", "[", "'module'", "]", ",", "exc_info", "=", "True", ")", "raise", "return", "self", ".", "_module_cache" ]
Python module to run the job. This is used by :func:`run` and the standard worker system. If the work spec contains keys ``module``, ``run_function``, and ``terminate_function``, then this contains the Python module object named as ``module``; otherwise this contains :const:`None`.
[ "Python", "module", "to", "run", "the", "job", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L298-L319
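WorkUnit.module above uses the Python 2 spelling of a lazy dynamic import. A modern sketch of "load the module named in a work spec and resolve its entry points by name" might look like this (the spec layout is only assumed to mirror the keys described in the docstring):

import importlib

def load_entry_points(spec):
    # Return (run_function, terminate_function) callables named in `spec`.
    module = importlib.import_module(spec['module'])
    run_fn = getattr(module, spec['run_function'])
    terminate_name = spec.get('terminate_function')
    terminate_fn = getattr(module, terminate_name) if terminate_name else None
    return run_fn, terminate_fn

# e.g. spec = {'module': 'mypackage.jobs', 'run_function': 'run',
#              'terminate_function': 'stop'}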
243,138
diffeo/rejester
rejester/_task_master.py
WorkUnit.run
def run(self): '''Actually runs the work unit. This is called by the standard worker system, generally once per work unit. It requires the work spec to contain keys ``module``, ``run_function``, and ``terminate_function``. It looks up ``run_function`` in :attr:`module` and calls that function with :const:`self` as its only parameter. ''' try: logger.info('running work unit {0}'.format(self.key)) run_function = getattr(self.module, self.spec['run_function']) ret_val = run_function(self) self.update() logger.info('completed work unit {0}'.format(self.key)) return ret_val except LostLease: logger.warning('work unit {0} timed out'.format(self.key)) raise except Exception: logger.error('work unit {0} failed'.format(self.key), exc_info=True) raise
python
def run(self): '''Actually runs the work unit. This is called by the standard worker system, generally once per work unit. It requires the work spec to contain keys ``module``, ``run_function``, and ``terminate_function``. It looks up ``run_function`` in :attr:`module` and calls that function with :const:`self` as its only parameter. ''' try: logger.info('running work unit {0}'.format(self.key)) run_function = getattr(self.module, self.spec['run_function']) ret_val = run_function(self) self.update() logger.info('completed work unit {0}'.format(self.key)) return ret_val except LostLease: logger.warning('work unit {0} timed out'.format(self.key)) raise except Exception: logger.error('work unit {0} failed'.format(self.key), exc_info=True) raise
[ "def", "run", "(", "self", ")", ":", "try", ":", "logger", ".", "info", "(", "'running work unit {0}'", ".", "format", "(", "self", ".", "key", ")", ")", "run_function", "=", "getattr", "(", "self", ".", "module", ",", "self", ".", "spec", "[", "'run_function'", "]", ")", "ret_val", "=", "run_function", "(", "self", ")", "self", ".", "update", "(", ")", "logger", ".", "info", "(", "'completed work unit {0}'", ".", "format", "(", "self", ".", "key", ")", ")", "return", "ret_val", "except", "LostLease", ":", "logger", ".", "warning", "(", "'work unit {0} timed out'", ".", "format", "(", "self", ".", "key", ")", ")", "raise", "except", "Exception", ":", "logger", ".", "error", "(", "'work unit {0} failed'", ".", "format", "(", "self", ".", "key", ")", ",", "exc_info", "=", "True", ")", "raise" ]
Actually runs the work unit. This is called by the standard worker system, generally once per work unit. It requires the work spec to contain keys ``module``, ``run_function``, and ``terminate_function``. It looks up ``run_function`` in :attr:`module` and calls that function with :const:`self` as its only parameter.
[ "Actually", "runs", "the", "work", "unit", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L321-L344
243,139
diffeo/rejester
rejester/_task_master.py
WorkUnit.terminate
def terminate(self): '''Kills the work unit. This is called by the standard worker system, but only in response to an operating system signal. If the job does setup such as creating a child process, its terminate function should kill that child process. More specifically, this function requires the work spec to contain the keys ``module``, ``run_function``, and ``terminate_function``, and calls ``terminate_function`` in :attr:`module` containing :const:`self` as its only parameter. ''' terminate_function_name = self.spec.get('terminate_function') if not terminate_function_name: logger.error('tried to terminate WorkUnit(%r) but no ' 'function name', self.key) return None terminate_function = getattr(self.module, self.spec['terminate_function']) if not terminate_function: logger.error('tried to terminate WorkUnit(%r) but no ' 'function %s in module %r', self.key, terminate_function_name, self.module.__name__) return None logger.info('calling terminate function for work unit {0}' .format(self.key)) ret_val = terminate_function(self) self.update(lease_time=-10) return ret_val
python
def terminate(self): '''Kills the work unit. This is called by the standard worker system, but only in response to an operating system signal. If the job does setup such as creating a child process, its terminate function should kill that child process. More specifically, this function requires the work spec to contain the keys ``module``, ``run_function``, and ``terminate_function``, and calls ``terminate_function`` in :attr:`module` containing :const:`self` as its only parameter. ''' terminate_function_name = self.spec.get('terminate_function') if not terminate_function_name: logger.error('tried to terminate WorkUnit(%r) but no ' 'function name', self.key) return None terminate_function = getattr(self.module, self.spec['terminate_function']) if not terminate_function: logger.error('tried to terminate WorkUnit(%r) but no ' 'function %s in module %r', self.key, terminate_function_name, self.module.__name__) return None logger.info('calling terminate function for work unit {0}' .format(self.key)) ret_val = terminate_function(self) self.update(lease_time=-10) return ret_val
[ "def", "terminate", "(", "self", ")", ":", "terminate_function_name", "=", "self", ".", "spec", ".", "get", "(", "'terminate_function'", ")", "if", "not", "terminate_function_name", ":", "logger", ".", "error", "(", "'tried to terminate WorkUnit(%r) but no '", "'function name'", ",", "self", ".", "key", ")", "return", "None", "terminate_function", "=", "getattr", "(", "self", ".", "module", ",", "self", ".", "spec", "[", "'terminate_function'", "]", ")", "if", "not", "terminate_function", ":", "logger", ".", "error", "(", "'tried to terminate WorkUnit(%r) but no '", "'function %s in module %r'", ",", "self", ".", "key", ",", "terminate_function_name", ",", "self", ".", "module", ".", "__name__", ")", "return", "None", "logger", ".", "info", "(", "'calling terminate function for work unit {0}'", ".", "format", "(", "self", ".", "key", ")", ")", "ret_val", "=", "terminate_function", "(", "self", ")", "self", ".", "update", "(", "lease_time", "=", "-", "10", ")", "return", "ret_val" ]
Kills the work unit. This is called by the standard worker system, but only in response to an operating system signal. If the job does setup such as creating a child process, its terminate function should kill that child process. More specifically, this function requires the work spec to contain the keys ``module``, ``run_function``, and ``terminate_function``, and calls ``terminate_function`` in :attr:`module` containing :const:`self` as its only parameter.
[ "Kills", "the", "work", "unit", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L346-L376
243,140
diffeo/rejester
rejester/_task_master.py
WorkUnit._refresh
def _refresh(self, session, stopping=False): '''Get this task's current state. This must be called under the registry's lock. It updates the :attr:`finished` and :attr:`failed` flags and the :attr:`data` dictionary based on the current state in the registry. In the normal case, nothing will change and this function will return normally. If it turns out that the work unit is already finished, the state of this object will change before :exc:`rejester.exceptions.LostLease` is raised. :param session: locked registry session :param stopping: don't raise if the work unit is finished :raises rejester.exceptions.LostLease: if this worker is no longer doing this work unit ''' data = session.get( WORK_UNITS_ + self.work_spec_name + _FINISHED, self.key) if data is not None: self.finished = True self.data = data if not stopping: raise LostLease('work unit is already finished') return self.finished = False data = session.get( WORK_UNITS_ + self.work_spec_name + _FAILED, self.key) if data is not None: self.failed = True self.data = data if not stopping: raise LostLease('work unit has already failed') return self.failed = False # (You need a pretty specific sequence of events to get here) data = session.get( WORK_UNITS_ + self.work_spec_name + _BLOCKED, self.key) if data is not None: self.data = data raise LostLease('work unit now blocked by others') worker_id = session.get( WORK_UNITS_ + self.work_spec_name + '_locks', self.key) if worker_id != self.worker_id: raise LostLease('work unit claimed by %r', worker_id) # NB: We could check the priority here, but don't. # If at this point we're technically overtime but nobody # else has started doing work yet, since we're under the # global lock, we can get away with finishing whatever # transition we were going to try to do. data = session.get( WORK_UNITS_ + self.work_spec_name, self.key) if data is None: raise NoSuchWorkUnitError('work unit is gone') # Since we should still own the work unit, any changes # in data should be on our end; do not touch it return
python
def _refresh(self, session, stopping=False): '''Get this task's current state. This must be called under the registry's lock. It updates the :attr:`finished` and :attr:`failed` flags and the :attr:`data` dictionary based on the current state in the registry. In the normal case, nothing will change and this function will return normally. If it turns out that the work unit is already finished, the state of this object will change before :exc:`rejester.exceptions.LostLease` is raised. :param session: locked registry session :param stopping: don't raise if the work unit is finished :raises rejester.exceptions.LostLease: if this worker is no longer doing this work unit ''' data = session.get( WORK_UNITS_ + self.work_spec_name + _FINISHED, self.key) if data is not None: self.finished = True self.data = data if not stopping: raise LostLease('work unit is already finished') return self.finished = False data = session.get( WORK_UNITS_ + self.work_spec_name + _FAILED, self.key) if data is not None: self.failed = True self.data = data if not stopping: raise LostLease('work unit has already failed') return self.failed = False # (You need a pretty specific sequence of events to get here) data = session.get( WORK_UNITS_ + self.work_spec_name + _BLOCKED, self.key) if data is not None: self.data = data raise LostLease('work unit now blocked by others') worker_id = session.get( WORK_UNITS_ + self.work_spec_name + '_locks', self.key) if worker_id != self.worker_id: raise LostLease('work unit claimed by %r', worker_id) # NB: We could check the priority here, but don't. # If at this point we're technically overtime but nobody # else has started doing work yet, since we're under the # global lock, we can get away with finishing whatever # transition we were going to try to do. data = session.get( WORK_UNITS_ + self.work_spec_name, self.key) if data is None: raise NoSuchWorkUnitError('work unit is gone') # Since we should still own the work unit, any changes # in data should be on our end; do not touch it return
[ "def", "_refresh", "(", "self", ",", "session", ",", "stopping", "=", "False", ")", ":", "data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_FINISHED", ",", "self", ".", "key", ")", "if", "data", "is", "not", "None", ":", "self", ".", "finished", "=", "True", "self", ".", "data", "=", "data", "if", "not", "stopping", ":", "raise", "LostLease", "(", "'work unit is already finished'", ")", "return", "self", ".", "finished", "=", "False", "data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_FAILED", ",", "self", ".", "key", ")", "if", "data", "is", "not", "None", ":", "self", ".", "failed", "=", "True", "self", ".", "data", "=", "data", "if", "not", "stopping", ":", "raise", "LostLease", "(", "'work unit has already failed'", ")", "return", "self", ".", "failed", "=", "False", "# (You need a pretty specific sequence of events to get here)", "data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_BLOCKED", ",", "self", ".", "key", ")", "if", "data", "is", "not", "None", ":", "self", ".", "data", "=", "data", "raise", "LostLease", "(", "'work unit now blocked by others'", ")", "worker_id", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "'_locks'", ",", "self", ".", "key", ")", "if", "worker_id", "!=", "self", ".", "worker_id", ":", "raise", "LostLease", "(", "'work unit claimed by %r'", ",", "worker_id", ")", "# NB: We could check the priority here, but don't.", "# If at this point we're technically overtime but nobody", "# else has started doing work yet, since we're under the", "# global lock, we can get away with finishing whatever", "# transition we were going to try to do.", "data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", ",", "self", ".", "key", ")", "if", "data", "is", "None", ":", "raise", "NoSuchWorkUnitError", "(", "'work unit is gone'", ")", "# Since we should still own the work unit, any changes", "# in data should be on our end; do not touch it", "return" ]
Get this task's current state. This must be called under the registry's lock. It updates the :attr:`finished` and :attr:`failed` flags and the :attr:`data` dictionary based on the current state in the registry. In the normal case, nothing will change and this function will return normally. If it turns out that the work unit is already finished, the state of this object will change before :exc:`rejester.exceptions.LostLease` is raised. :param session: locked registry session :param stopping: don't raise if the work unit is finished :raises rejester.exceptions.LostLease: if this worker is no longer doing this work unit
[ "Get", "this", "task", "s", "current", "state", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L378-L441
243,141
diffeo/rejester
rejester/_task_master.py
WorkUnit.update
def update(self, lease_time=None): '''Refresh this task's expiration time. This tries to set the task's expiration time to the current time, plus `lease_time` seconds. It requires the job to not already be complete. If `lease_time` is negative, makes the job immediately be available for other workers to run. :param int lease_time: time to extend job lease beyond now :raises rejester.exceptions.LostLease: if the lease has already expired ''' if lease_time is None: lease_time = self.default_lifetime with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session) try: self.expires = time.time() + lease_time session.update( WORK_UNITS_ + self.work_spec_name, {self.key: self.data}, priorities={self.key: self.expires}, locks={self.key: self.worker_id}) except EnvironmentError, exc: raise LostLease(exc)
python
def update(self, lease_time=None): '''Refresh this task's expiration time. This tries to set the task's expiration time to the current time, plus `lease_time` seconds. It requires the job to not already be complete. If `lease_time` is negative, makes the job immediately be available for other workers to run. :param int lease_time: time to extend job lease beyond now :raises rejester.exceptions.LostLease: if the lease has already expired ''' if lease_time is None: lease_time = self.default_lifetime with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session) try: self.expires = time.time() + lease_time session.update( WORK_UNITS_ + self.work_spec_name, {self.key: self.data}, priorities={self.key: self.expires}, locks={self.key: self.worker_id}) except EnvironmentError, exc: raise LostLease(exc)
[ "def", "update", "(", "self", ",", "lease_time", "=", "None", ")", ":", "if", "lease_time", "is", "None", ":", "lease_time", "=", "self", ".", "default_lifetime", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "self", ".", "_refresh", "(", "session", ")", "try", ":", "self", ".", "expires", "=", "time", ".", "time", "(", ")", "+", "lease_time", "session", ".", "update", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", ",", "{", "self", ".", "key", ":", "self", ".", "data", "}", ",", "priorities", "=", "{", "self", ".", "key", ":", "self", ".", "expires", "}", ",", "locks", "=", "{", "self", ".", "key", ":", "self", ".", "worker_id", "}", ")", "except", "EnvironmentError", ",", "exc", ":", "raise", "LostLease", "(", "exc", ")" ]
Refresh this task's expiration time. This tries to set the task's expiration time to the current time, plus `lease_time` seconds. It requires the job to not already be complete. If `lease_time` is negative, makes the job immediately be available for other workers to run. :param int lease_time: time to extend job lease beyond now :raises rejester.exceptions.LostLease: if the lease has already expired
[ "Refresh", "this", "task", "s", "expiration", "time", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L443-L469
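The lease handling in WorkUnit.update above reduces to storing an absolute expiry timestamp; a toy version of just that arithmetic, with a class name invented for illustration:

import time

class Lease:
    def __init__(self, lifetime=900):
        self.default_lifetime = lifetime
        self.expires = time.time() + lifetime

    def renew(self, lease_time=None):
        # A negative lease_time pushes `expires` into the past, which is how
        # a unit becomes immediately claimable by other workers again.
        if lease_time is None:
            lease_time = self.default_lifetime
        self.expires = time.time() + lease_time

    def expired(self):
        return time.time() > self.expires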
243,142
diffeo/rejester
rejester/_task_master.py
WorkUnit.finish
def finish(self): '''Move this work unit to a finished state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` returns successfully. :raises rejester.exceptions.LostLease: if the lease has already expired ''' with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session, stopping=True) if self.finished or self.failed: return session.move( WORK_UNITS_ + self.work_spec_name, WORK_UNITS_ + self.work_spec_name + _FINISHED, {self.key: self.data}) session.popmany( WORK_UNITS_ + self.work_spec_name + '_locks', self.key, self.worker_id) blocks = session.get( WORK_UNITS_ + self.work_spec_name + _BLOCKS, self.key) if blocks is not None: for block in blocks: spec = block[0] unit = block[1] # hard = block[2] depends = session.get(WORK_UNITS_ + spec + _DEPENDS, unit) if depends is None: continue depends.remove([self.work_spec_name, self.key]) if len(depends) == 0: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec, {unit: unitdef}) else: session.set(WORK_UNITS_ + spec + _DEPENDS, unit, depends) self.finished = True
python
def finish(self): '''Move this work unit to a finished state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` returns successfully. :raises rejester.exceptions.LostLease: if the lease has already expired ''' with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session, stopping=True) if self.finished or self.failed: return session.move( WORK_UNITS_ + self.work_spec_name, WORK_UNITS_ + self.work_spec_name + _FINISHED, {self.key: self.data}) session.popmany( WORK_UNITS_ + self.work_spec_name + '_locks', self.key, self.worker_id) blocks = session.get( WORK_UNITS_ + self.work_spec_name + _BLOCKS, self.key) if blocks is not None: for block in blocks: spec = block[0] unit = block[1] # hard = block[2] depends = session.get(WORK_UNITS_ + spec + _DEPENDS, unit) if depends is None: continue depends.remove([self.work_spec_name, self.key]) if len(depends) == 0: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec, {unit: unitdef}) else: session.set(WORK_UNITS_ + spec + _DEPENDS, unit, depends) self.finished = True
[ "def", "finish", "(", "self", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "self", ".", "_refresh", "(", "session", ",", "stopping", "=", "True", ")", "if", "self", ".", "finished", "or", "self", ".", "failed", ":", "return", "session", ".", "move", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", ",", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_FINISHED", ",", "{", "self", ".", "key", ":", "self", ".", "data", "}", ")", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "'_locks'", ",", "self", ".", "key", ",", "self", ".", "worker_id", ")", "blocks", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_BLOCKS", ",", "self", ".", "key", ")", "if", "blocks", "is", "not", "None", ":", "for", "block", "in", "blocks", ":", "spec", "=", "block", "[", "0", "]", "unit", "=", "block", "[", "1", "]", "# hard = block[2]", "depends", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ")", "if", "depends", "is", "None", ":", "continue", "depends", ".", "remove", "(", "[", "self", ".", "work_spec_name", ",", "self", ".", "key", "]", ")", "if", "len", "(", "depends", ")", "==", "0", ":", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ")", "unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "unit", ")", "session", ".", "move", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "WORK_UNITS_", "+", "spec", ",", "{", "unit", ":", "unitdef", "}", ")", "else", ":", "session", ".", "set", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ",", "depends", ")", "self", ".", "finished", "=", "True" ]
Move this work unit to a finished state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` returns successfully. :raises rejester.exceptions.LostLease: if the lease has already expired
[ "Move", "this", "work", "unit", "to", "a", "finished", "state", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L471-L515
243,143
diffeo/rejester
rejester/_task_master.py
WorkUnit.fail
def fail(self, exc=None): '''Move this work unit to a failed state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` ends with any exception: .. code-block:: python try: work_unit.run() work_unit.finish() except Exception, e: work_unit.fail(e) A ``traceback`` property is recorded with a formatted version of `exc`, if any. :param exc: Exception that caused the failure, or :const:`None` :raises rejester.exceptions.LostLease: if the lease has already expired ''' with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session, stopping=True) if self.finished or self.failed: return if exc: self.data['traceback'] = traceback.format_exc(exc) else: self.data['traceback'] = None session.move( WORK_UNITS_ + self.work_spec_name, WORK_UNITS_ + self.work_spec_name + _FAILED, {self.key: self.data}) session.popmany( WORK_UNITS_ + self.work_spec_name + '_locks', self.key, self.worker_id) blocks = session.get(WORK_UNITS_ + self.work_spec_name + _BLOCKS, self.key) if blocks is not None: for block in blocks: spec = block[0] unit = block[1] hard = block[2] if hard: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) if unitdef is not None: session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec + _FAILED, {unit: unitdef}) else: depends = session.get(WORK_UNITS_ + spec + _DEPENDS, unit) if depends is None: continue depends.remove([self.work_spec_name, self.key]) if len(depends) == 0: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) if unitdef is not None: session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec, {unit: unitdef}) else: session.set(WORK_UNITS_ + spec + _DEPENDS, unit, depends) self.failed = True
python
def fail(self, exc=None): '''Move this work unit to a failed state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` ends with any exception: .. code-block:: python try: work_unit.run() work_unit.finish() except Exception, e: work_unit.fail(e) A ``traceback`` property is recorded with a formatted version of `exc`, if any. :param exc: Exception that caused the failure, or :const:`None` :raises rejester.exceptions.LostLease: if the lease has already expired ''' with self.registry.lock(identifier=self.worker_id) as session: self._refresh(session, stopping=True) if self.finished or self.failed: return if exc: self.data['traceback'] = traceback.format_exc(exc) else: self.data['traceback'] = None session.move( WORK_UNITS_ + self.work_spec_name, WORK_UNITS_ + self.work_spec_name + _FAILED, {self.key: self.data}) session.popmany( WORK_UNITS_ + self.work_spec_name + '_locks', self.key, self.worker_id) blocks = session.get(WORK_UNITS_ + self.work_spec_name + _BLOCKS, self.key) if blocks is not None: for block in blocks: spec = block[0] unit = block[1] hard = block[2] if hard: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) if unitdef is not None: session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec + _FAILED, {unit: unitdef}) else: depends = session.get(WORK_UNITS_ + spec + _DEPENDS, unit) if depends is None: continue depends.remove([self.work_spec_name, self.key]) if len(depends) == 0: session.popmany(WORK_UNITS_ + spec + _DEPENDS, unit) unitdef = session.get(WORK_UNITS_ + spec + _BLOCKED, unit) if unitdef is not None: session.move(WORK_UNITS_ + spec + _BLOCKED, WORK_UNITS_ + spec, {unit: unitdef}) else: session.set(WORK_UNITS_ + spec + _DEPENDS, unit, depends) self.failed = True
[ "def", "fail", "(", "self", ",", "exc", "=", "None", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "self", ".", "_refresh", "(", "session", ",", "stopping", "=", "True", ")", "if", "self", ".", "finished", "or", "self", ".", "failed", ":", "return", "if", "exc", ":", "self", ".", "data", "[", "'traceback'", "]", "=", "traceback", ".", "format_exc", "(", "exc", ")", "else", ":", "self", ".", "data", "[", "'traceback'", "]", "=", "None", "session", ".", "move", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", ",", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_FAILED", ",", "{", "self", ".", "key", ":", "self", ".", "data", "}", ")", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "'_locks'", ",", "self", ".", "key", ",", "self", ".", "worker_id", ")", "blocks", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "self", ".", "work_spec_name", "+", "_BLOCKS", ",", "self", ".", "key", ")", "if", "blocks", "is", "not", "None", ":", "for", "block", "in", "blocks", ":", "spec", "=", "block", "[", "0", "]", "unit", "=", "block", "[", "1", "]", "hard", "=", "block", "[", "2", "]", "if", "hard", ":", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ")", "unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "unit", ")", "if", "unitdef", "is", "not", "None", ":", "session", ".", "move", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "WORK_UNITS_", "+", "spec", "+", "_FAILED", ",", "{", "unit", ":", "unitdef", "}", ")", "else", ":", "depends", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ")", "if", "depends", "is", "None", ":", "continue", "depends", ".", "remove", "(", "[", "self", ".", "work_spec_name", ",", "self", ".", "key", "]", ")", "if", "len", "(", "depends", ")", "==", "0", ":", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ")", "unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "unit", ")", "if", "unitdef", "is", "not", "None", ":", "session", ".", "move", "(", "WORK_UNITS_", "+", "spec", "+", "_BLOCKED", ",", "WORK_UNITS_", "+", "spec", ",", "{", "unit", ":", "unitdef", "}", ")", "else", ":", "session", ".", "set", "(", "WORK_UNITS_", "+", "spec", "+", "_DEPENDS", ",", "unit", ",", "depends", ")", "self", ".", "failed", "=", "True" ]
Move this work unit to a failed state. In the standard worker system, the worker calls this on the job's behalf when :meth:`run_function` ends with any exception: .. code-block:: python try: work_unit.run() work_unit.finish() except Exception, e: work_unit.fail(e) A ``traceback`` property is recorded with a formatted version of `exc`, if any. :param exc: Exception that caused the failure, or :const:`None` :raises rejester.exceptions.LostLease: if the lease has already expired
[ "Move", "this", "work", "unit", "to", "a", "failed", "state", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L517-L586
243,144
diffeo/rejester
rejester/_task_master.py
TaskMaster.set_mode
def set_mode(self, mode): '''Set the global mode of the rejester system. This must be one of the constants :attr:`TERMINATE`, :attr:`RUN`, or :attr:`IDLE`. :attr:`TERMINATE` instructs any running workers to do an orderly shutdown, completing current jobs then exiting. :attr:`IDLE` instructs workers to stay running but not start new jobs. :attr:`RUN` tells workers to do actual work. :param str mode: new rejester mode :raise rejester.exceptions.ProgrammerError: on invalid `mode` ''' if mode not in [self.TERMINATE, self.RUN, self.IDLE]: raise ProgrammerError('mode=%r is not recognized' % mode) with self.registry.lock(identifier=self.worker_id) as session: session.set('modes', 'mode', mode) logger.info('set mode to %s', mode)
python
def set_mode(self, mode): '''Set the global mode of the rejester system. This must be one of the constants :attr:`TERMINATE`, :attr:`RUN`, or :attr:`IDLE`. :attr:`TERMINATE` instructs any running workers to do an orderly shutdown, completing current jobs then exiting. :attr:`IDLE` instructs workers to stay running but not start new jobs. :attr:`RUN` tells workers to do actual work. :param str mode: new rejester mode :raise rejester.exceptions.ProgrammerError: on invalid `mode` ''' if mode not in [self.TERMINATE, self.RUN, self.IDLE]: raise ProgrammerError('mode=%r is not recognized' % mode) with self.registry.lock(identifier=self.worker_id) as session: session.set('modes', 'mode', mode) logger.info('set mode to %s', mode)
[ "def", "set_mode", "(", "self", ",", "mode", ")", ":", "if", "mode", "not", "in", "[", "self", ".", "TERMINATE", ",", "self", ".", "RUN", ",", "self", ".", "IDLE", "]", ":", "raise", "ProgrammerError", "(", "'mode=%r is not recognized'", "%", "mode", ")", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "session", ".", "set", "(", "'modes'", ",", "'mode'", ",", "mode", ")", "logger", ".", "info", "(", "'set mode to %s'", ",", "mode", ")" ]
Set the global mode of the rejester system. This must be one of the constants :attr:`TERMINATE`, :attr:`RUN`, or :attr:`IDLE`. :attr:`TERMINATE` instructs any running workers to do an orderly shutdown, completing current jobs then exiting. :attr:`IDLE` instructs workers to stay running but not start new jobs. :attr:`RUN` tells workers to do actual work. :param str mode: new rejester mode :raise rejester.exceptions.ProgrammerError: on invalid `mode`
[ "Set", "the", "global", "mode", "of", "the", "rejester", "system", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L717-L735
243,145
diffeo/rejester
rejester/_task_master.py
TaskMaster.mode_counts
def mode_counts(self): '''Get the number of workers in each mode. This returns a dictionary where the keys are mode constants and the values are a simple integer count of the number of workers in that mode. ''' modes = {self.RUN: 0, self.IDLE: 0, self.TERMINATE: 0} for worker_id, mode in self.workers().items(): modes[mode] += 1 return modes
python
def mode_counts(self): '''Get the number of workers in each mode. This returns a dictionary where the keys are mode constants and the values are a simple integer count of the number of workers in that mode. ''' modes = {self.RUN: 0, self.IDLE: 0, self.TERMINATE: 0} for worker_id, mode in self.workers().items(): modes[mode] += 1 return modes
[ "def", "mode_counts", "(", "self", ")", ":", "modes", "=", "{", "self", ".", "RUN", ":", "0", ",", "self", ".", "IDLE", ":", "0", ",", "self", ".", "TERMINATE", ":", "0", "}", "for", "worker_id", ",", "mode", "in", "self", ".", "workers", "(", ")", ".", "items", "(", ")", ":", "modes", "[", "mode", "]", "+=", "1", "return", "modes" ]
Get the number of workers in each mode. This returns a dictionary where the keys are mode constants and the values are a simple integer count of the number of workers in that mode.
[ "Get", "the", "number", "of", "workers", "in", "each", "mode", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L773-L784
243,146
diffeo/rejester
rejester/_task_master.py
TaskMaster.workers
def workers(self, alive=True): '''Get a listing of all workers. This returns a dictionary mapping worker ID to the mode constant for their last observed mode. :param bool alive: if true (default), only include workers that have called :meth:`Worker.heartbeat` sufficiently recently ''' return self.registry.filter( WORKER_OBSERVED_MODE, priority_min=alive and time.time() or '-inf')
python
def workers(self, alive=True): '''Get a listing of all workers. This returns a dictionary mapping worker ID to the mode constant for their last observed mode. :param bool alive: if true (default), only include workers that have called :meth:`Worker.heartbeat` sufficiently recently ''' return self.registry.filter( WORKER_OBSERVED_MODE, priority_min=alive and time.time() or '-inf')
[ "def", "workers", "(", "self", ",", "alive", "=", "True", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORKER_OBSERVED_MODE", ",", "priority_min", "=", "alive", "and", "time", ".", "time", "(", ")", "or", "'-inf'", ")" ]
Get a listing of all workers. This returns a dictionary mapping worker ID to the mode constant for their last observed mode. :param bool alive: if true (default), only include workers that have called :meth:`Worker.heartbeat` sufficiently recently
[ "Get", "a", "listing", "of", "all", "workers", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L786-L798
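The `alive and time.time() or '-inf'` expression in TaskMaster.workers above is the old and/or conditional idiom; with a boolean `alive` it is equivalent to the modern conditional expression shown below:

import time

alive = True
# legacy and/or trick: time.time() when alive is truthy, '-inf' otherwise
legacy = alive and time.time() or '-inf'
# clearer modern spelling (and safe even if the middle value could be falsy)
modern = time.time() if alive else '-inf'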
243,147
diffeo/rejester
rejester/_task_master.py
TaskMaster.dump
def dump(self): '''Print the entire contents of this to debug log messages. This is really only intended for debugging. It could produce a lot of data. ''' with self.registry.lock(identifier=self.worker_id) as session: for work_spec_name in self.registry.pull(NICE_LEVELS).iterkeys(): def scan(sfx): v = self.registry.pull(WORK_UNITS_ + work_spec_name + sfx) if v is None: return [] return v.keys() for key in scan(''): logger.debug('spec {0} unit {1} available or pending' .format(work_spec_name, key)) for key in scan(_BLOCKED): blocked_on = session.get( WORK_UNITS_ + work_spec_name + _DEPENDS, key) logger.debug('spec {0} unit {1} blocked on {2!r}' .format(work_spec_name, key, blocked_on)) for key in scan(_FINISHED): logger.debug('spec {0} unit {1} finished' .format(work_spec_name, key)) for key in scan(_FAILED): logger.debug('spec {0} unit {1} failed' .format(work_spec_name, key))
python
def dump(self): '''Print the entire contents of this to debug log messages. This is really only intended for debugging. It could produce a lot of data. ''' with self.registry.lock(identifier=self.worker_id) as session: for work_spec_name in self.registry.pull(NICE_LEVELS).iterkeys(): def scan(sfx): v = self.registry.pull(WORK_UNITS_ + work_spec_name + sfx) if v is None: return [] return v.keys() for key in scan(''): logger.debug('spec {0} unit {1} available or pending' .format(work_spec_name, key)) for key in scan(_BLOCKED): blocked_on = session.get( WORK_UNITS_ + work_spec_name + _DEPENDS, key) logger.debug('spec {0} unit {1} blocked on {2!r}' .format(work_spec_name, key, blocked_on)) for key in scan(_FINISHED): logger.debug('spec {0} unit {1} finished' .format(work_spec_name, key)) for key in scan(_FAILED): logger.debug('spec {0} unit {1} failed' .format(work_spec_name, key))
[ "def", "dump", "(", "self", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "for", "work_spec_name", "in", "self", ".", "registry", ".", "pull", "(", "NICE_LEVELS", ")", ".", "iterkeys", "(", ")", ":", "def", "scan", "(", "sfx", ")", ":", "v", "=", "self", ".", "registry", ".", "pull", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "sfx", ")", "if", "v", "is", "None", ":", "return", "[", "]", "return", "v", ".", "keys", "(", ")", "for", "key", "in", "scan", "(", "''", ")", ":", "logger", ".", "debug", "(", "'spec {0} unit {1} available or pending'", ".", "format", "(", "work_spec_name", ",", "key", ")", ")", "for", "key", "in", "scan", "(", "_BLOCKED", ")", ":", "blocked_on", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_DEPENDS", ",", "key", ")", "logger", ".", "debug", "(", "'spec {0} unit {1} blocked on {2!r}'", ".", "format", "(", "work_spec_name", ",", "key", ",", "blocked_on", ")", ")", "for", "key", "in", "scan", "(", "_FINISHED", ")", ":", "logger", ".", "debug", "(", "'spec {0} unit {1} finished'", ".", "format", "(", "work_spec_name", ",", "key", ")", ")", "for", "key", "in", "scan", "(", "_FAILED", ")", ":", "logger", ".", "debug", "(", "'spec {0} unit {1} failed'", ".", "format", "(", "work_spec_name", ",", "key", ")", ")" ]
Print the entire contents of this to debug log messages. This is really only intended for debugging. It could produce a lot of data.
[ "Print", "the", "entire", "contents", "of", "this", "to", "debug", "log", "messages", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L814-L841
243,148
diffeo/rejester
rejester/_task_master.py
TaskMaster.validate_work_spec
def validate_work_spec(cls, work_spec): '''Check that `work_spec` is valid. It must at the very minimum contain a ``name`` and ``min_gb``. :raise rejester.exceptions.ProgrammerError: if it isn't valid ''' if 'name' not in work_spec: raise ProgrammerError('work_spec lacks "name"') if 'min_gb' not in work_spec or \ not isinstance(work_spec['min_gb'], (float, int, long)): raise ProgrammerError('work_spec["min_gb"] must be a number')
python
def validate_work_spec(cls, work_spec): '''Check that `work_spec` is valid. It must at the very minimum contain a ``name`` and ``min_gb``. :raise rejester.exceptions.ProgrammerError: if it isn't valid ''' if 'name' not in work_spec: raise ProgrammerError('work_spec lacks "name"') if 'min_gb' not in work_spec or \ not isinstance(work_spec['min_gb'], (float, int, long)): raise ProgrammerError('work_spec["min_gb"] must be a number')
[ "def", "validate_work_spec", "(", "cls", ",", "work_spec", ")", ":", "if", "'name'", "not", "in", "work_spec", ":", "raise", "ProgrammerError", "(", "'work_spec lacks \"name\"'", ")", "if", "'min_gb'", "not", "in", "work_spec", "or", "not", "isinstance", "(", "work_spec", "[", "'min_gb'", "]", ",", "(", "float", ",", "int", ",", "long", ")", ")", ":", "raise", "ProgrammerError", "(", "'work_spec[\"min_gb\"] must be a number'", ")" ]
Check that `work_spec` is valid. It must at the very minimum contain a ``name`` and ``min_gb``. :raise rejester.exceptions.ProgrammerError: if it isn't valid
[ "Check", "that", "work_spec", "is", "valid", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L844-L856
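A small sketch of the validation contract above: a work spec needs at least a name and a numeric min_gb. The import paths are assumptions based on this row's path (rejester/_task_master.py) and the docstring's reference to rejester.exceptions.ProgrammerError, and the call assumes validate_work_spec is exposed as a classmethod, as its cls parameter suggests; the spec contents are hypothetical.

from rejester._task_master import TaskMaster   # assumed import path
from rejester.exceptions import ProgrammerError

good_spec = {'name': 'example_spec', 'min_gb': 0.5}   # hypothetical spec
bad_spec = {'name': 'example_spec'}                   # min_gb missing

TaskMaster.validate_work_spec(good_spec)   # passes silently
try:
    TaskMaster.validate_work_spec(bad_spec)
except ProgrammerError as exc:
    print('rejected: {0}'.format(exc))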
243,149
diffeo/rejester
rejester/_task_master.py
TaskMaster.num_available
def num_available(self, work_spec_name): '''Get the number of available work units for some work spec. These are work units that could be returned by :meth:`get_work`: they are not complete, not currently executing, and not blocked on some other work unit. ''' return self.registry.len(WORK_UNITS_ + work_spec_name, priority_max=time.time())
python
def num_available(self, work_spec_name): '''Get the number of available work units for some work spec. These are work units that could be returned by :meth:`get_work`: they are not complete, not currently executing, and not blocked on some other work unit. ''' return self.registry.len(WORK_UNITS_ + work_spec_name, priority_max=time.time())
[ "def", "num_available", "(", "self", ",", "work_spec_name", ")", ":", "return", "self", ".", "registry", ".", "len", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "priority_max", "=", "time", ".", "time", "(", ")", ")" ]
Get the number of available work units for some work spec. These are work units that could be returned by :meth:`get_work`: they are not complete, not currently executing, and not blocked on some other work unit.
[ "Get", "the", "number", "of", "available", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L858-L867
243,150
diffeo/rejester
rejester/_task_master.py
TaskMaster.num_pending
def num_pending(self, work_spec_name): '''Get the number of pending work units for some work spec. These are work units that some worker is currently working on (hopefully; it could include work units assigned to workers that died and that have not yet expired). ''' return self.registry.len(WORK_UNITS_ + work_spec_name, priority_min=time.time())
python
def num_pending(self, work_spec_name): '''Get the number of pending work units for some work spec. These are work units that some worker is currently working on (hopefully; it could include work units assigned to workers that died and that have not yet expired). ''' return self.registry.len(WORK_UNITS_ + work_spec_name, priority_min=time.time())
[ "def", "num_pending", "(", "self", ",", "work_spec_name", ")", ":", "return", "self", ".", "registry", ".", "len", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "priority_min", "=", "time", ".", "time", "(", ")", ")" ]
Get the number of pending work units for some work spec. These are work units that some worker is currently working on (hopefully; it could include work units assigned to workers that died and that have not yet expired).
[ "Get", "the", "number", "of", "pending", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L869-L878
243,151
diffeo/rejester
rejester/_task_master.py
TaskMaster.num_tasks
def num_tasks(self, work_spec_name): '''Get the total number of work units for some work spec.''' return self.num_finished(work_spec_name) + \ self.num_failed(work_spec_name) + \ self.registry.len(WORK_UNITS_ + work_spec_name)
python
def num_tasks(self, work_spec_name): '''Get the total number of work units for some work spec.''' return self.num_finished(work_spec_name) + \ self.num_failed(work_spec_name) + \ self.registry.len(WORK_UNITS_ + work_spec_name)
[ "def", "num_tasks", "(", "self", ",", "work_spec_name", ")", ":", "return", "self", ".", "num_finished", "(", "work_spec_name", ")", "+", "self", ".", "num_failed", "(", "work_spec_name", ")", "+", "self", ".", "registry", ".", "len", "(", "WORK_UNITS_", "+", "work_spec_name", ")" ]
Get the total number of work units for some work spec.
[ "Get", "the", "total", "number", "of", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L906-L910
243,152
diffeo/rejester
rejester/_task_master.py
TaskMaster.status
def status(self, work_spec_name): '''Get a summary dictionary for some work spec. The keys are the strings :meth:`num_available`, :meth:`num_pending`, :meth:`num_blocked`, :meth:`num_finished`, :meth:`num_failed`, and :meth:`num_tasks`, and the values are the values returned from those functions. ''' return dict( num_available=self.num_available(work_spec_name), num_pending=self.num_pending(work_spec_name), num_blocked=self.num_blocked(work_spec_name), num_finished=self.num_finished(work_spec_name), num_failed=self.num_failed(work_spec_name), num_tasks=self.num_tasks(work_spec_name), )
python
def status(self, work_spec_name): '''Get a summary dictionary for some work spec. The keys are the strings :meth:`num_available`, :meth:`num_pending`, :meth:`num_blocked`, :meth:`num_finished`, :meth:`num_failed`, and :meth:`num_tasks`, and the values are the values returned from those functions. ''' return dict( num_available=self.num_available(work_spec_name), num_pending=self.num_pending(work_spec_name), num_blocked=self.num_blocked(work_spec_name), num_finished=self.num_finished(work_spec_name), num_failed=self.num_failed(work_spec_name), num_tasks=self.num_tasks(work_spec_name), )
[ "def", "status", "(", "self", ",", "work_spec_name", ")", ":", "return", "dict", "(", "num_available", "=", "self", ".", "num_available", "(", "work_spec_name", ")", ",", "num_pending", "=", "self", ".", "num_pending", "(", "work_spec_name", ")", ",", "num_blocked", "=", "self", ".", "num_blocked", "(", "work_spec_name", ")", ",", "num_finished", "=", "self", ".", "num_finished", "(", "work_spec_name", ")", ",", "num_failed", "=", "self", ".", "num_failed", "(", "work_spec_name", ")", ",", "num_tasks", "=", "self", ".", "num_tasks", "(", "work_spec_name", ")", ",", ")" ]
Get a summary dictionary for some work spec. The keys are the strings :meth:`num_available`, :meth:`num_pending`, :meth:`num_blocked`, :meth:`num_finished`, :meth:`num_failed`, and :meth:`num_tasks`, and the values are the values returned from those functions.
[ "Get", "a", "summary", "dictionary", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L912-L928
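Because the keys of the status dictionary are fixed by the code above, a monitoring helper can rely on them directly. A sketch assuming an existing TaskMaster instance; the work spec name is supplied by the caller:

def progress_summary(task_master, work_spec_name):
    counts = task_master.status(work_spec_name)
    done = counts['num_finished'] + counts['num_failed']
    total = counts['num_tasks']
    return '{0}: {1}/{2} done, {3} pending, {4} blocked, {5} failed'.format(
        work_spec_name, done, total, counts['num_pending'],
        counts['num_blocked'], counts['num_failed'])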
243,153
diffeo/rejester
rejester/_task_master.py
TaskMaster.iter_work_specs
def iter_work_specs(self, limit=None, start=None): ''' yield work spec dicts ''' count = 0 ws_list, start = self.list_work_specs(limit, start) while True: for name_spec in ws_list: yield name_spec[1] count += 1 if (limit is not None) and (count >= limit): break if not start: break if limit is not None: limit -= count ws_list, start = self.list_work_specs(limit, start)
python
def iter_work_specs(self, limit=None, start=None): ''' yield work spec dicts ''' count = 0 ws_list, start = self.list_work_specs(limit, start) while True: for name_spec in ws_list: yield name_spec[1] count += 1 if (limit is not None) and (count >= limit): break if not start: break if limit is not None: limit -= count ws_list, start = self.list_work_specs(limit, start)
[ "def", "iter_work_specs", "(", "self", ",", "limit", "=", "None", ",", "start", "=", "None", ")", ":", "count", "=", "0", "ws_list", ",", "start", "=", "self", ".", "list_work_specs", "(", "limit", ",", "start", ")", "while", "True", ":", "for", "name_spec", "in", "ws_list", ":", "yield", "name_spec", "[", "1", "]", "count", "+=", "1", "if", "(", "limit", "is", "not", "None", ")", "and", "(", "count", ">=", "limit", ")", ":", "break", "if", "not", "start", ":", "break", "if", "limit", "is", "not", "None", ":", "limit", "-=", "count", "ws_list", ",", "start", "=", "self", ".", "list_work_specs", "(", "limit", ",", "start", ")" ]
yield work spec dicts
[ "yield", "work", "spec", "dicts" ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L941-L957
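iter_work_specs yields the work spec dictionaries themselves, and validate_work_spec guarantees each one carries a name key, so listing every spec name is short. Sketch assuming an existing TaskMaster instance:

def all_spec_names(task_master):
    # Walks every registered work spec, following the generator's own paging.
    return [spec['name'] for spec in task_master.iter_work_specs()]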
243,154
diffeo/rejester
rejester/_task_master.py
TaskMaster.get_work_spec
def get_work_spec(self, work_spec_name): '''Get the dictionary defining some work spec.''' with self.registry.lock(identifier=self.worker_id) as session: return session.get(WORK_SPECS, work_spec_name)
python
def get_work_spec(self, work_spec_name): '''Get the dictionary defining some work spec.''' with self.registry.lock(identifier=self.worker_id) as session: return session.get(WORK_SPECS, work_spec_name)
[ "def", "get_work_spec", "(", "self", ",", "work_spec_name", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "return", "session", ".", "get", "(", "WORK_SPECS", ",", "work_spec_name", ")" ]
Get the dictionary defining some work spec.
[ "Get", "the", "dictionary", "defining", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L959-L962
243,155
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_work_units
def list_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been completed ("available" or "pending" work units) are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, start=start, limit=limit)
python
def list_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been completed ("available" or "pending" work units) are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, start=start, limit=limit)
[ "def", "list_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been completed ("available" or "pending" work units) are included.
[ "Get", "a", "dictionary", "of", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1017-L1026
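The listing methods share start/limit paging parameters; the default start=0 suggests a numeric offset, which is an assumption in the sketch below. It collects every not-yet-completed unit of one work spec a page at a time, assuming an existing TaskMaster instance:

def all_incomplete_units(task_master, work_spec_name, page_size=100):
    units = {}
    start = 0
    while True:
        page = task_master.list_work_units(work_spec_name,
                                            start=start, limit=page_size)
        if not page:
            break
        units.update(page)
        start += len(page)   # assumes start is an offset, per the default of 0
    return units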
243,156
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_available_work_units
def list_available_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of available work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been started, or units that were started but did not complete in a timely fashion, are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, priority_max=time.time(), start=start, limit=limit)
python
def list_available_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of available work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been started, or units that were started but did not complete in a timely fashion, are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, priority_max=time.time(), start=start, limit=limit)
[ "def", "list_available_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "priority_max", "=", "time", ".", "time", "(", ")", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of available work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have not been started, or units that were started but did not complete in a timely fashion, are included.
[ "Get", "a", "dictionary", "of", "available", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1028-L1039
243,157
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_pending_work_units
def list_pending_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of in-progress work units for some work spec. The dictionary is from work unit name to work unit definition. Units listed here should be worked on by some worker. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, priority_min=time.time(), start=start, limit=limit)
python
def list_pending_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of in-progress work units for some work spec. The dictionary is from work unit name to work unit definition. Units listed here should be worked on by some worker. """ return self.registry.filter(WORK_UNITS_ + work_spec_name, priority_min=time.time(), start=start, limit=limit)
[ "def", "list_pending_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "priority_min", "=", "time", ".", "time", "(", ")", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of in-progress work units for some work spec. The dictionary is from work unit name to work unit definition. Units listed here should be worked on by some worker.
[ "Get", "a", "dictionary", "of", "in", "-", "progress", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1041-L1050
243,158
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_blocked_work_units
def list_blocked_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of blocked work units for some work spec. The dictionary is from work unit name to work unit definition. Work units included in this list are blocked because they were listed as the first work unit in :func:`add_dependent_work_units`, and the work unit(s) they depend on have not completed yet. This function does not tell why work units are blocked, it merely returns the fact that they are. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _BLOCKED, start=start, limit=limit)
python
def list_blocked_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of blocked work units for some work spec. The dictionary is from work unit name to work unit definition. Work units included in this list are blocked because they were listed as the first work unit in :func:`add_dependent_work_units`, and the work unit(s) they depend on have not completed yet. This function does not tell why work units are blocked, it merely returns the fact that they are. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _BLOCKED, start=start, limit=limit)
[ "def", "list_blocked_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_BLOCKED", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of blocked work units for some work spec. The dictionary is from work unit name to work unit definition. Work units included in this list are blocked because they were listed as the first work unit in :func:`add_dependent_work_units`, and the work unit(s) they depend on have not completed yet. This function does not tell why work units are blocked, it merely returns the fact that they are.
[ "Get", "a", "dictionary", "of", "blocked", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1052-L1065
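As the docstring notes, this listing does not say why a unit is blocked; pairing it with get_work_unit_status (a later row in this file) recovers the depends_on detail. Sketch assuming an existing TaskMaster instance:

def blocked_report(task_master, work_spec_name):
    report = {}
    for key in task_master.list_blocked_work_units(work_spec_name):
        status = task_master.get_work_unit_status(work_spec_name, key)
        report[key] = status.get('depends_on', [])
    return report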
243,159
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_finished_work_units
def list_finished_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of finished work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have been successfully completed are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _FINISHED, start=start, limit=limit)
python
def list_finished_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of finished work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have been successfully completed are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _FINISHED, start=start, limit=limit)
[ "def", "list_finished_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FINISHED", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of finished work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have been successfully completed are included.
[ "Get", "a", "dictionary", "of", "finished", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1067-L1076
243,160
diffeo/rejester
rejester/_task_master.py
TaskMaster.list_failed_work_units
def list_failed_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of failed work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have completed unsuccessfully are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _FAILED, start=start, limit=limit)
python
def list_failed_work_units(self, work_spec_name, start=0, limit=None): """Get a dictionary of failed work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have completed unsuccessfully are included. """ return self.registry.filter(WORK_UNITS_ + work_spec_name + _FAILED, start=start, limit=limit)
[ "def", "list_failed_work_units", "(", "self", ",", "work_spec_name", ",", "start", "=", "0", ",", "limit", "=", "None", ")", ":", "return", "self", ".", "registry", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FAILED", ",", "start", "=", "start", ",", "limit", "=", "limit", ")" ]
Get a dictionary of failed work units for some work spec. The dictionary is from work unit name to work unit definition. Only work units that have completed unsuccessfully are included.
[ "Get", "a", "dictionary", "of", "failed", "work", "units", "for", "some", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1078-L1086
243,161
diffeo/rejester
rejester/_task_master.py
TaskMaster._remove_some_work_units
def _remove_some_work_units(self, work_spec_name, work_unit_names, suffix='', priority_min='-inf', priority_max='+inf'): '''Remove some units from somewhere.''' now = time.time() if work_unit_names is None: count = 0 while True: with self.registry.lock(identifier=self.worker_id) as session: names = session.filter( WORK_UNITS_ + work_spec_name + suffix, priority_min=priority_min, priority_max=priority_max, limit=1000) if not names: break count += session.popmany( WORK_UNITS_ + work_spec_name + suffix, *names) else: # TODO: This needs to honor priority_min/priority_max, # otherwise it gets the wrong answer for "available"/ # "pending" (it will get both states). with self.registry.lock(identifier=self.worker_id) as session: count = session.popmany(WORK_UNITS_ + work_spec_name + suffix, *work_unit_names) return count
python
def _remove_some_work_units(self, work_spec_name, work_unit_names, suffix='', priority_min='-inf', priority_max='+inf'): '''Remove some units from somewhere.''' now = time.time() if work_unit_names is None: count = 0 while True: with self.registry.lock(identifier=self.worker_id) as session: names = session.filter( WORK_UNITS_ + work_spec_name + suffix, priority_min=priority_min, priority_max=priority_max, limit=1000) if not names: break count += session.popmany( WORK_UNITS_ + work_spec_name + suffix, *names) else: # TODO: This needs to honor priority_min/priority_max, # otherwise it gets the wrong answer for "available"/ # "pending" (it will get both states). with self.registry.lock(identifier=self.worker_id) as session: count = session.popmany(WORK_UNITS_ + work_spec_name + suffix, *work_unit_names) return count
[ "def", "_remove_some_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ",", "suffix", "=", "''", ",", "priority_min", "=", "'-inf'", ",", "priority_max", "=", "'+inf'", ")", ":", "now", "=", "time", ".", "time", "(", ")", "if", "work_unit_names", "is", "None", ":", "count", "=", "0", "while", "True", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "names", "=", "session", ".", "filter", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "suffix", ",", "priority_min", "=", "priority_min", ",", "priority_max", "=", "priority_max", ",", "limit", "=", "1000", ")", "if", "not", "names", ":", "break", "count", "+=", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "suffix", ",", "*", "names", ")", "else", ":", "# TODO: This needs to honor priority_min/priority_max,", "# otherwise it gets the wrong answer for \"available\"/", "# \"pending\" (it will get both states).", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "count", "=", "session", ".", "popmany", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "suffix", ",", "*", "work_unit_names", ")", "return", "count" ]
Remove some units from somewhere.
[ "Remove", "some", "units", "from", "somewhere", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1088-L1111
243,162
diffeo/rejester
rejester/_task_master.py
TaskMaster.del_work_units
def del_work_units(self, work_spec_name, work_unit_keys=None, state=None, all=False): '''Delete work units from a work spec. The parameters are considered in order as follows: * If `all` is :const:`True`, then all work units in `work_spec_name` are deleted; otherwise * If `state` is not :const:`None`, then all work units in the named state are deleted; otherwise * If `work_unit_keys` are specified, then those specific work units are deleted; otherwise * Nothing is deleted. :param str work_spec_name: name of the work spec :param list work_unit_keys: if not :const:`None`, only delete these specific keys :param str state: only delete work units in this state :param bool all: if true, delete all work units :return: number of work units deleted ''' count = 0 if (state is None) or (state == AVAILABLE): count += self.remove_available_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == PENDING): count += self.remove_pending_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == BLOCKED): count += self.remove_blocked_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == FAILED): count += self.remove_failed_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == FINISHED): count += self.remove_finished_work_units(work_spec_name, work_unit_keys) return count
python
def del_work_units(self, work_spec_name, work_unit_keys=None, state=None, all=False): '''Delete work units from a work spec. The parameters are considered in order as follows: * If `all` is :const:`True`, then all work units in `work_spec_name` are deleted; otherwise * If `state` is not :const:`None`, then all work units in the named state are deleted; otherwise * If `work_unit_keys` are specified, then those specific work units are deleted; otherwise * Nothing is deleted. :param str work_spec_name: name of the work spec :param list work_unit_keys: if not :const:`None`, only delete these specific keys :param str state: only delete work units in this state :param bool all: if true, delete all work units :return: number of work units deleted ''' count = 0 if (state is None) or (state == AVAILABLE): count += self.remove_available_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == PENDING): count += self.remove_pending_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == BLOCKED): count += self.remove_blocked_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == FAILED): count += self.remove_failed_work_units(work_spec_name, work_unit_keys) if (state is None) or (state == FINISHED): count += self.remove_finished_work_units(work_spec_name, work_unit_keys) return count
[ "def", "del_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_keys", "=", "None", ",", "state", "=", "None", ",", "all", "=", "False", ")", ":", "count", "=", "0", "if", "(", "state", "is", "None", ")", "or", "(", "state", "==", "AVAILABLE", ")", ":", "count", "+=", "self", ".", "remove_available_work_units", "(", "work_spec_name", ",", "work_unit_keys", ")", "if", "(", "state", "is", "None", ")", "or", "(", "state", "==", "PENDING", ")", ":", "count", "+=", "self", ".", "remove_pending_work_units", "(", "work_spec_name", ",", "work_unit_keys", ")", "if", "(", "state", "is", "None", ")", "or", "(", "state", "==", "BLOCKED", ")", ":", "count", "+=", "self", ".", "remove_blocked_work_units", "(", "work_spec_name", ",", "work_unit_keys", ")", "if", "(", "state", "is", "None", ")", "or", "(", "state", "==", "FAILED", ")", ":", "count", "+=", "self", ".", "remove_failed_work_units", "(", "work_spec_name", ",", "work_unit_keys", ")", "if", "(", "state", "is", "None", ")", "or", "(", "state", "==", "FINISHED", ")", ":", "count", "+=", "self", ".", "remove_finished_work_units", "(", "work_spec_name", ",", "work_unit_keys", ")", "return", "count" ]
Delete work units from a work spec. The parameters are considered in order as follows: * If `all` is :const:`True`, then all work units in `work_spec_name` are deleted; otherwise * If `state` is not :const:`None`, then all work units in the named state are deleted; otherwise * If `work_unit_keys` are specified, then those specific work units are deleted; otherwise * Nothing is deleted. :param str work_spec_name: name of the work spec :param list work_unit_keys: if not :const:`None`, only delete these specific keys :param str state: only delete work units in this state :param bool all: if true, delete all work units :return: number of work units deleted
[ "Delete", "work", "units", "from", "a", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1113-L1146
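The precedence described above maps onto three common call shapes. FAILED is one of the state names the code references; importing it from rejester._task_master is an assumption, and the spec and unit keys below are hypothetical. The sketch assumes an existing TaskMaster instance:

from rejester._task_master import FAILED   # assumed location of the constant

def prune_examples(task_master):
    # Delete two specific units, whatever state they are in.
    task_master.del_work_units('example_spec',
                               work_unit_keys=['unit-0001', 'unit-0002'])
    # Delete everything that failed.
    task_master.del_work_units('example_spec', state=FAILED)
    # Delete every unit of the spec, regardless of state.
    return task_master.del_work_units('example_spec', all=True)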
243,163
diffeo/rejester
rejester/_task_master.py
TaskMaster.remove_available_work_units
def remove_available_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the available queue. If `work_unit_names` is :const:`None` (which must be passed explicitly), all available work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, priority_max=time.time())
python
def remove_available_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the available queue. If `work_unit_names` is :const:`None` (which must be passed explicitly), all available work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, priority_max=time.time())
[ "def", "remove_available_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ")", ":", "return", "self", ".", "_remove_some_work_units", "(", "work_spec_name", ",", "work_unit_names", ",", "priority_max", "=", "time", ".", "time", "(", ")", ")" ]
Remove some work units in the available queue. If `work_unit_names` is :const:`None` (which must be passed explicitly), all available work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed
[ "Remove", "some", "work", "units", "in", "the", "available", "queue", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1154-L1168
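The explicit None is part of the call here rather than a default, so "remove everything available" has to be spelled out. Tiny sketch, assuming an existing TaskMaster instance:

def clear_available(task_master, work_spec_name):
    # None (passed explicitly) means every available unit in this spec.
    return task_master.remove_available_work_units(work_spec_name, None)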
243,164
diffeo/rejester
rejester/_task_master.py
TaskMaster.remove_pending_work_units
def remove_pending_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the pending list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that this function has the potential to confuse workers if they are actually working on the work units in question. If you have ensured that the workers are dead and you would be otherwise waiting for the leases to expire before calling :meth:`remove_available_work_units`, then this is a useful shortcut. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, priority_min=time.time())
python
def remove_pending_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the pending list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that this function has the potential to confuse workers if they are actually working on the work units in question. If you have ensured that the workers are dead and you would be otherwise waiting for the leases to expire before calling :meth:`remove_available_work_units`, then this is a useful shortcut. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, priority_min=time.time())
[ "def", "remove_pending_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ")", ":", "return", "self", ".", "_remove_some_work_units", "(", "work_spec_name", ",", "work_unit_names", ",", "priority_min", "=", "time", ".", "time", "(", ")", ")" ]
Remove some work units in the pending list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that this function has the potential to confuse workers if they are actually working on the work units in question. If you have ensured that the workers are dead and you would be otherwise waiting for the leases to expire before calling :meth:`remove_available_work_units`, then this is a useful shortcut. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed
[ "Remove", "some", "work", "units", "in", "the", "pending", "list", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1170-L1191
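As warned above, this is only safe once the workers holding those leases are known to be dead; it simply skips the wait for the leases to expire. Sketch, assuming an existing TaskMaster instance:

def purge_dead_worker_leases(task_master, work_spec_name, keys=None):
    # keys=None drops every pending unit; pass a list of unit names to be
    # more selective about which (dead) workers' leases are discarded.
    return task_master.remove_pending_work_units(work_spec_name, keys)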
243,165
diffeo/rejester
rejester/_task_master.py
TaskMaster.remove_blocked_work_units
def remove_blocked_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the blocked list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that none of the "remove" functions will restart blocked work units, so if you have called e.g. :meth:`remove_available_work_units` for a predecessor job, you may need to also call this method for its successor. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_BLOCKED)
python
def remove_blocked_work_units(self, work_spec_name, work_unit_names): '''Remove some work units in the blocked list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that none of the "remove" functions will restart blocked work units, so if you have called e.g. :meth:`remove_available_work_units` for a predecessor job, you may need to also call this method for its successor. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_BLOCKED)
[ "def", "remove_blocked_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ")", ":", "return", "self", ".", "_remove_some_work_units", "(", "work_spec_name", ",", "work_unit_names", ",", "suffix", "=", "_BLOCKED", ")" ]
Remove some work units in the blocked list. If `work_unit_names` is :const:`None` (which must be passed explicitly), all pending work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Note that none of the "remove" functions will restart blocked work units, so if you have called e.g. :meth:`remove_available_work_units` for a predecessor job, you may need to also call this method for its successor. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed
[ "Remove", "some", "work", "units", "in", "the", "blocked", "list", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1193-L1212
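Because none of the remove calls restarts or removes successors, cleaning up a dependent pair takes one call per side. Sketch with hypothetical spec names, assuming an existing TaskMaster instance:

def drop_dependent_pair(task_master, earlier_spec, later_spec):
    # Drop the not-yet-run predecessor units...
    task_master.remove_available_work_units(earlier_spec, None)
    # ...and separately drop the successors that were waiting on them.
    return task_master.remove_blocked_work_units(later_spec, None)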
243,166
diffeo/rejester
rejester/_task_master.py
TaskMaster.remove_failed_work_units
def remove_failed_work_units(self, work_spec_name, work_unit_names): '''Remove some failed work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all failed work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Also consider :meth:`retry` to move failed work units back into the available queue. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_FAILED)
python
def remove_failed_work_units(self, work_spec_name, work_unit_names): '''Remove some failed work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all failed work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Also consider :meth:`retry` to move failed work units back into the available queue. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_FAILED)
[ "def", "remove_failed_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ")", ":", "return", "self", ".", "_remove_some_work_units", "(", "work_spec_name", ",", "work_unit_names", ",", "suffix", "=", "_FAILED", ")" ]
Remove some failed work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all failed work units in `work_spec_name` are removed; otherwise only the specific named work units will be. Also consider :meth:`retry` to move failed work units back into the available queue. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed
[ "Remove", "some", "failed", "work", "units", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1214-L1231
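Removing failed units is the give-up path; the docstring points at retry for pushing them back into the available queue instead (retry's signature is not shown in these rows, so it is not called here). Sketch, assuming an existing TaskMaster instance:

def discard_failures(task_master, work_spec_name):
    # Permanently drops every failed unit for this spec.
    return task_master.remove_failed_work_units(work_spec_name, None)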
243,167
diffeo/rejester
rejester/_task_master.py
TaskMaster.remove_finished_work_units
def remove_finished_work_units(self, work_spec_name, work_unit_names): '''Remove some finished work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all finished work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_FINISHED)
python
def remove_finished_work_units(self, work_spec_name, work_unit_names): '''Remove some finished work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all finished work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed ''' return self._remove_some_work_units( work_spec_name, work_unit_names, suffix=_FINISHED)
[ "def", "remove_finished_work_units", "(", "self", ",", "work_spec_name", ",", "work_unit_names", ")", ":", "return", "self", ".", "_remove_some_work_units", "(", "work_spec_name", ",", "work_unit_names", ",", "suffix", "=", "_FINISHED", ")" ]
Remove some finished work units. If `work_unit_names` is :const:`None` (which must be passed explicitly), all finished work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec :param list work_unit_names: names of the work units, or :const:`None` for all in `work_spec_name` :return: number of work units removed
[ "Remove", "some", "finished", "work", "units", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1233-L1247
243,168
diffeo/rejester
rejester/_task_master.py
TaskMaster.get_work_unit_status
def get_work_unit_status(self, work_spec_name, work_unit_key): '''Get a high-level status for some work unit. The return value is a dictionary. The only required key is ``status``, which could be any of: ``missing`` The work unit does not exist anywhere ``available`` The work unit is available for new workers; additional keys include ``expiration`` (may be 0) ``pending`` The work unit is being worked on; additional keys include ``expiration`` and ``worker_id`` (usually) ``blocked`` The work unit is waiting for some other work units to finish; additional keys include ``depends_on`` ``finished`` The work unit has completed ``failed`` The work unit failed; additional keys include ``traceback`` :param str work_spec_name: name of the work spec :param str work_unit_name: name of the work unit :return: dictionary description of summary status ''' with self.registry.lock(identifier=self.worker_id) as session: # In the available list? (unit,priority) = session.get(WORK_UNITS_ + work_spec_name, work_unit_key, include_priority=True) if unit: result = {} if priority < time.time(): result['status'] = 'available' else: result['status'] = 'pending' result['expiration'] = priority # ...is anyone working on it? worker = session.get(WORK_UNITS_ + work_spec_name + "_locks", work_unit_key) if worker: result['worker_id'] = worker return result # In the finished list? unit = session.get(WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key) if unit: return { 'status': 'finished' } # In the failed list? unit = session.get(WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key) if unit: result = { 'status': 'failed' } if 'traceback' in unit: result['traceback'] = unit['traceback'] return result # In the blocked list? unit = session.get(WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key) if unit: # This should always have *something*, right? deps = session.get(WORK_UNITS_ + work_spec_name + _DEPENDS, work_unit_key, default=[]) result = { 'status': 'blocked', 'depends_on': deps } return result return { 'status': 'missing' }
python
def get_work_unit_status(self, work_spec_name, work_unit_key): '''Get a high-level status for some work unit. The return value is a dictionary. The only required key is ``status``, which could be any of: ``missing`` The work unit does not exist anywhere ``available`` The work unit is available for new workers; additional keys include ``expiration`` (may be 0) ``pending`` The work unit is being worked on; additional keys include ``expiration`` and ``worker_id`` (usually) ``blocked`` The work unit is waiting for some other work units to finish; additional keys include ``depends_on`` ``finished`` The work unit has completed ``failed`` The work unit failed; additional keys include ``traceback`` :param str work_spec_name: name of the work spec :param str work_unit_name: name of the work unit :return: dictionary description of summary status ''' with self.registry.lock(identifier=self.worker_id) as session: # In the available list? (unit,priority) = session.get(WORK_UNITS_ + work_spec_name, work_unit_key, include_priority=True) if unit: result = {} if priority < time.time(): result['status'] = 'available' else: result['status'] = 'pending' result['expiration'] = priority # ...is anyone working on it? worker = session.get(WORK_UNITS_ + work_spec_name + "_locks", work_unit_key) if worker: result['worker_id'] = worker return result # In the finished list? unit = session.get(WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key) if unit: return { 'status': 'finished' } # In the failed list? unit = session.get(WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key) if unit: result = { 'status': 'failed' } if 'traceback' in unit: result['traceback'] = unit['traceback'] return result # In the blocked list? unit = session.get(WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key) if unit: # This should always have *something*, right? deps = session.get(WORK_UNITS_ + work_spec_name + _DEPENDS, work_unit_key, default=[]) result = { 'status': 'blocked', 'depends_on': deps } return result return { 'status': 'missing' }
[ "def", "get_work_unit_status", "(", "self", ",", "work_spec_name", ",", "work_unit_key", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "# In the available list?", "(", "unit", ",", "priority", ")", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "work_unit_key", ",", "include_priority", "=", "True", ")", "if", "unit", ":", "result", "=", "{", "}", "if", "priority", "<", "time", ".", "time", "(", ")", ":", "result", "[", "'status'", "]", "=", "'available'", "else", ":", "result", "[", "'status'", "]", "=", "'pending'", "result", "[", "'expiration'", "]", "=", "priority", "# ...is anyone working on it?", "worker", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "\"_locks\"", ",", "work_unit_key", ")", "if", "worker", ":", "result", "[", "'worker_id'", "]", "=", "worker", "return", "result", "# In the finished list?", "unit", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FINISHED", ",", "work_unit_key", ")", "if", "unit", ":", "return", "{", "'status'", ":", "'finished'", "}", "# In the failed list?", "unit", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FAILED", ",", "work_unit_key", ")", "if", "unit", ":", "result", "=", "{", "'status'", ":", "'failed'", "}", "if", "'traceback'", "in", "unit", ":", "result", "[", "'traceback'", "]", "=", "unit", "[", "'traceback'", "]", "return", "result", "# In the blocked list?", "unit", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_BLOCKED", ",", "work_unit_key", ")", "if", "unit", ":", "# This should always have *something*, right?", "deps", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_DEPENDS", ",", "work_unit_key", ",", "default", "=", "[", "]", ")", "result", "=", "{", "'status'", ":", "'blocked'", ",", "'depends_on'", ":", "deps", "}", "return", "result", "return", "{", "'status'", ":", "'missing'", "}" ]
Get a high-level status for some work unit. The return value is a dictionary. The only required key is ``status``, which could be any of: ``missing`` The work unit does not exist anywhere ``available`` The work unit is available for new workers; additional keys include ``expiration`` (may be 0) ``pending`` The work unit is being worked on; additional keys include ``expiration`` and ``worker_id`` (usually) ``blocked`` The work unit is waiting for some other work units to finish; additional keys include ``depends_on`` ``finished`` The work unit has completed ``failed`` The work unit failed; additional keys include ``traceback`` :param str work_spec_name: name of the work spec :param str work_unit_name: name of the work unit :return: dictionary description of summary status
[ "Get", "a", "high", "-", "level", "status", "for", "some", "work", "unit", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1249-L1319
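The per-status keys listed above are enough to turn any unit into a one-line description. Sketch assuming an existing TaskMaster instance; the formatting choices are illustrative only:

def describe_unit(task_master, work_spec_name, work_unit_key):
    info = task_master.get_work_unit_status(work_spec_name, work_unit_key)
    state = info['status']
    if state == 'pending':
        return 'pending on {0}, lease expires at {1}'.format(
            info.get('worker_id', '<unknown worker>'), info['expiration'])
    if state == 'blocked':
        return 'blocked on {0!r}'.format(info['depends_on'])
    if state == 'failed':
        return 'failed: {0}'.format(info.get('traceback', '(no traceback)'))
    return state   # 'available', 'finished', or 'missing'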
243,169
diffeo/rejester
rejester/_task_master.py
TaskMaster.inspect_work_unit
def inspect_work_unit(self, work_spec_name, work_unit_key): '''Get the data for some work unit. Returns the data for that work unit, or `None` if it really can't be found. :param str work_spec_name: name of the work spec :param str work_unit_key: name of the work unit :return: definition of the work unit, or `None` ''' with self.registry.lock(identifier=self.worker_id) as session: work_unit_data = session.get( WORK_UNITS_ + work_spec_name, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key) return work_unit_data
python
def inspect_work_unit(self, work_spec_name, work_unit_key): '''Get the data for some work unit. Returns the data for that work unit, or `None` if it really can't be found. :param str work_spec_name: name of the work spec :param str work_unit_key: name of the work unit :return: definition of the work unit, or `None` ''' with self.registry.lock(identifier=self.worker_id) as session: work_unit_data = session.get( WORK_UNITS_ + work_spec_name, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _BLOCKED, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _FINISHED, work_unit_key) if not work_unit_data: work_unit_data = session.get( WORK_UNITS_ + work_spec_name + _FAILED, work_unit_key) return work_unit_data
[ "def", "inspect_work_unit", "(", "self", ",", "work_spec_name", ",", "work_unit_key", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "work_unit_data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "work_unit_key", ")", "if", "not", "work_unit_data", ":", "work_unit_data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_BLOCKED", ",", "work_unit_key", ")", "if", "not", "work_unit_data", ":", "work_unit_data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FINISHED", ",", "work_unit_key", ")", "if", "not", "work_unit_data", ":", "work_unit_data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FAILED", ",", "work_unit_key", ")", "return", "work_unit_data" ]
Get the data for some work unit. Returns the data for that work unit, or `None` if it really can't be found. :param str work_spec_name: name of the work spec :param str work_unit_key: name of the work unit :return: definition of the work unit, or `None`
[ "Get", "the", "data", "for", "some", "work", "unit", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1321-L1343
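Since a missing unit comes back as None rather than raising, callers that require the data need an explicit check. Tiny sketch, assuming an existing TaskMaster instance:

def require_unit(task_master, work_spec_name, work_unit_key):
    data = task_master.inspect_work_unit(work_spec_name, work_unit_key)
    if data is None:
        raise KeyError('no such work unit: {0}'.format(work_unit_key))
    return data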
243,170
diffeo/rejester
rejester/_task_master.py
TaskMaster.reset_all
def reset_all(self, work_spec_name): '''Restart a work spec. This calls :meth:`idle_all_workers`, then moves all finished jobs back into the available queue. .. deprecated:: 0.4.5 See :meth:`idle_all_workers` for problems with that method. This also ignores failed jobs and work unit dependencies. In practice, whatever generated a set of work units initially can recreate them easily enough. ''' self.idle_all_workers() with self.registry.lock(identifier=self.worker_id) as session: session.move_all(WORK_UNITS_ + work_spec_name + _FINISHED, WORK_UNITS_ + work_spec_name) session.reset_priorities(WORK_UNITS_ + work_spec_name, 0)
python
def reset_all(self, work_spec_name): '''Restart a work spec. This calls :meth:`idle_all_workers`, then moves all finished jobs back into the available queue. .. deprecated:: 0.4.5 See :meth:`idle_all_workers` for problems with that method. This also ignores failed jobs and work unit dependencies. In practice, whatever generated a set of work units initially can recreate them easily enough. ''' self.idle_all_workers() with self.registry.lock(identifier=self.worker_id) as session: session.move_all(WORK_UNITS_ + work_spec_name + _FINISHED, WORK_UNITS_ + work_spec_name) session.reset_priorities(WORK_UNITS_ + work_spec_name, 0)
[ "def", "reset_all", "(", "self", ",", "work_spec_name", ")", ":", "self", ".", "idle_all_workers", "(", ")", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "session", ".", "move_all", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "_FINISHED", ",", "WORK_UNITS_", "+", "work_spec_name", ")", "session", ".", "reset_priorities", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "0", ")" ]
Restart a work spec. This calls :meth:`idle_all_workers`, then moves all finished jobs back into the available queue. .. deprecated:: 0.4.5 See :meth:`idle_all_workers` for problems with that method. This also ignores failed jobs and work unit dependencies. In practice, whatever generated a set of work units initially can recreate them easily enough.
[ "Restart", "a", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1349-L1366
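reset_all is deprecated and only requeues finished units (failed units and dependencies are ignored), so regenerating the work units is usually the better option; when it is used anyway, the call is a single method invocation. Sketch, assuming an existing TaskMaster instance:

def rerun_finished(task_master, work_spec_name):
    # Deprecated shortcut: moves finished units back into the available queue
    # at priority 0; failed and blocked units are left untouched.
    task_master.reset_all(work_spec_name)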
243,171
diffeo/rejester
rejester/_task_master.py
TaskMaster.add_dependent_work_units
def add_dependent_work_units(self, work_unit, depends_on, hard=True): """Add work units, where one prevents execution of the other. The two work units may be attached to different work specs, but both must be in this task master's namespace. `work_unit` and `depends_on` are both tuples of (work spec name, work unit name, work unit dictionary). The work specs must already exist; they may be created with :meth:`update_bundle` with an empty work unit dictionary. If a work unit dictionary is provided with either work unit, then this defines that work unit, and any existing definition is replaced. Either or both work unit dictionaries may be :const:`None`, in which case the work unit is not created if it does not already exist. In this last case, the other work unit will be added if specified, but the dependency will not be added, and this function will return :const:`False`. In all other cases, this dependency is added in addition to all existing dependencies on either or both work units, even if the work unit dictionary is replaced. `work_unit` will not be executed or reported as available via :meth:`get_work` until `depends_on` finishes execution. If the `depends_on` task fails, then the `hard` parameter describes what happens: if `hard` is :const:`True` then `work_unit` will also fail, but if `hard` is :const:`False` then `work_unit` will be able to execute even if `depends_on` fails, it just must have completed some execution attempt. Calling this function with ``hard=True`` suggests an ordered sequence of tasks where the later task depends on the output of the earlier tasks. Calling this function with ``hard=False`` suggests a cleanup task that must run after this task (and, likely, several others) are done, but doesn't specifically depend on its result being available. :param work_unit: "Later" work unit to execute :paramtype work_unit: tuple of (str,str,dict) :param depends_on: "Earlier" work unit to execute :paramtype depends_on: tuple of (str,str,dict) :param bool hard: if True, then `work_unit` automatically fails if `depends_on` fails :return: :const:`True`, unless one or both of the work units didn't exist and weren't specified, in which case, :const:`False` :raise rejester.exceptions.NoSuchWorkSpecError: if a work spec was named that doesn't exist """ # There's no good, not-confusing terminology here. # I'll call work_unit "later" and depends_on "earlier" # consistently, because that at least makes the time flow # correct. 
later_spec, later_unit, later_unitdef = work_unit earlier_spec, earlier_unit, earlier_unitdef = depends_on with self.registry.lock(identifier=self.worker_id) as session: # Bail if either work spec doesn't already exist if session.get(WORK_SPECS, later_spec) is None: raise NoSuchWorkSpecError(later_spec) if session.get(WORK_SPECS, earlier_spec) is None: raise NoSuchWorkSpecError(earlier_spec) # Cause both work units to exist (if possible) # Note that if "earlier" is already finished, we may be # able to make "later" available immediately earlier_done = False earlier_successful = False if earlier_unitdef is not None: session.update(WORK_UNITS_ + earlier_spec, { earlier_unit: earlier_unitdef }) else: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec, earlier_unit) if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _BLOCKED, earlier_unit) if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _FINISHED, earlier_unit) if earlier_unitdef is not None: earlier_done = True earlier_successful = True if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _FAILED, earlier_unit) if earlier_unitdef is not None: earlier_done = True later_failed = earlier_done and hard and not earlier_successful later_unblocked = ((earlier_done and not later_failed) or (earlier_unitdef is None)) if later_failed: later_destination = WORK_UNITS_ + later_spec + _FAILED elif later_unblocked: later_destination = WORK_UNITS_ + later_spec else: later_destination = WORK_UNITS_ + later_spec + _BLOCKED if later_unitdef is not None: for suffix in ['', _FINISHED, _FAILED, _BLOCKED]: k = WORK_UNITS_ + later_spec + suffix if k != later_destination: session.popmany(k, later_unit) session.update(later_destination, { later_unit: later_unitdef }) elif earlier_unitdef is not None: later_unitdef = session.get( WORK_UNITS_ + later_spec, later_unit) if later_unitdef is not None: session.move( WORK_UNITS_ + later_spec, WORK_UNITS_ + later_spec + _BLOCKED, { later_unit: later_unitdef }) else: later_unitdef = session.get( WORK_UNITS_ + later_spec + _BLOCKED, later_unit) if later_unitdef is None or earlier_unitdef is None: return False # Now both units exist and are in the right place; # record the dependency blocks = session.get(WORK_UNITS_ + earlier_spec + _BLOCKS, earlier_unit) if blocks is None: blocks = [] blocks.append([later_spec, later_unit, hard]) session.set(WORK_UNITS_ + earlier_spec + _BLOCKS, earlier_unit, blocks) depends = session.get(WORK_UNITS_ + later_spec + _DEPENDS, later_unit) if depends is None: depends = [] depends.append([earlier_spec, earlier_unit]) session.set(WORK_UNITS_ + later_spec + _DEPENDS, later_unit, depends) return True
python
def add_dependent_work_units(self, work_unit, depends_on, hard=True): """Add work units, where one prevents execution of the other. The two work units may be attached to different work specs, but both must be in this task master's namespace. `work_unit` and `depends_on` are both tuples of (work spec name, work unit name, work unit dictionary). The work specs must already exist; they may be created with :meth:`update_bundle` with an empty work unit dictionary. If a work unit dictionary is provided with either work unit, then this defines that work unit, and any existing definition is replaced. Either or both work unit dictionaries may be :const:`None`, in which case the work unit is not created if it does not already exist. In this last case, the other work unit will be added if specified, but the dependency will not be added, and this function will return :const:`False`. In all other cases, this dependency is added in addition to all existing dependencies on either or both work units, even if the work unit dictionary is replaced. `work_unit` will not be executed or reported as available via :meth:`get_work` until `depends_on` finishes execution. If the `depends_on` task fails, then the `hard` parameter describes what happens: if `hard` is :const:`True` then `work_unit` will also fail, but if `hard` is :const:`False` then `work_unit` will be able to execute even if `depends_on` fails, it just must have completed some execution attempt. Calling this function with ``hard=True`` suggests an ordered sequence of tasks where the later task depends on the output of the earlier tasks. Calling this function with ``hard=False`` suggests a cleanup task that must run after this task (and, likely, several others) are done, but doesn't specifically depend on its result being available. :param work_unit: "Later" work unit to execute :paramtype work_unit: tuple of (str,str,dict) :param depends_on: "Earlier" work unit to execute :paramtype depends_on: tuple of (str,str,dict) :param bool hard: if True, then `work_unit` automatically fails if `depends_on` fails :return: :const:`True`, unless one or both of the work units didn't exist and weren't specified, in which case, :const:`False` :raise rejester.exceptions.NoSuchWorkSpecError: if a work spec was named that doesn't exist """ # There's no good, not-confusing terminology here. # I'll call work_unit "later" and depends_on "earlier" # consistently, because that at least makes the time flow # correct. 
later_spec, later_unit, later_unitdef = work_unit earlier_spec, earlier_unit, earlier_unitdef = depends_on with self.registry.lock(identifier=self.worker_id) as session: # Bail if either work spec doesn't already exist if session.get(WORK_SPECS, later_spec) is None: raise NoSuchWorkSpecError(later_spec) if session.get(WORK_SPECS, earlier_spec) is None: raise NoSuchWorkSpecError(earlier_spec) # Cause both work units to exist (if possible) # Note that if "earlier" is already finished, we may be # able to make "later" available immediately earlier_done = False earlier_successful = False if earlier_unitdef is not None: session.update(WORK_UNITS_ + earlier_spec, { earlier_unit: earlier_unitdef }) else: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec, earlier_unit) if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _BLOCKED, earlier_unit) if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _FINISHED, earlier_unit) if earlier_unitdef is not None: earlier_done = True earlier_successful = True if earlier_unitdef is None: earlier_unitdef = session.get( WORK_UNITS_ + earlier_spec + _FAILED, earlier_unit) if earlier_unitdef is not None: earlier_done = True later_failed = earlier_done and hard and not earlier_successful later_unblocked = ((earlier_done and not later_failed) or (earlier_unitdef is None)) if later_failed: later_destination = WORK_UNITS_ + later_spec + _FAILED elif later_unblocked: later_destination = WORK_UNITS_ + later_spec else: later_destination = WORK_UNITS_ + later_spec + _BLOCKED if later_unitdef is not None: for suffix in ['', _FINISHED, _FAILED, _BLOCKED]: k = WORK_UNITS_ + later_spec + suffix if k != later_destination: session.popmany(k, later_unit) session.update(later_destination, { later_unit: later_unitdef }) elif earlier_unitdef is not None: later_unitdef = session.get( WORK_UNITS_ + later_spec, later_unit) if later_unitdef is not None: session.move( WORK_UNITS_ + later_spec, WORK_UNITS_ + later_spec + _BLOCKED, { later_unit: later_unitdef }) else: later_unitdef = session.get( WORK_UNITS_ + later_spec + _BLOCKED, later_unit) if later_unitdef is None or earlier_unitdef is None: return False # Now both units exist and are in the right place; # record the dependency blocks = session.get(WORK_UNITS_ + earlier_spec + _BLOCKS, earlier_unit) if blocks is None: blocks = [] blocks.append([later_spec, later_unit, hard]) session.set(WORK_UNITS_ + earlier_spec + _BLOCKS, earlier_unit, blocks) depends = session.get(WORK_UNITS_ + later_spec + _DEPENDS, later_unit) if depends is None: depends = [] depends.append([earlier_spec, earlier_unit]) session.set(WORK_UNITS_ + later_spec + _DEPENDS, later_unit, depends) return True
[ "def", "add_dependent_work_units", "(", "self", ",", "work_unit", ",", "depends_on", ",", "hard", "=", "True", ")", ":", "# There's no good, not-confusing terminology here.", "# I'll call work_unit \"later\" and depends_on \"earlier\"", "# consistently, because that at least makes the time flow", "# correct.", "later_spec", ",", "later_unit", ",", "later_unitdef", "=", "work_unit", "earlier_spec", ",", "earlier_unit", ",", "earlier_unitdef", "=", "depends_on", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "# Bail if either work spec doesn't already exist", "if", "session", ".", "get", "(", "WORK_SPECS", ",", "later_spec", ")", "is", "None", ":", "raise", "NoSuchWorkSpecError", "(", "later_spec", ")", "if", "session", ".", "get", "(", "WORK_SPECS", ",", "earlier_spec", ")", "is", "None", ":", "raise", "NoSuchWorkSpecError", "(", "earlier_spec", ")", "# Cause both work units to exist (if possible)", "# Note that if \"earlier\" is already finished, we may be", "# able to make \"later\" available immediately", "earlier_done", "=", "False", "earlier_successful", "=", "False", "if", "earlier_unitdef", "is", "not", "None", ":", "session", ".", "update", "(", "WORK_UNITS_", "+", "earlier_spec", ",", "{", "earlier_unit", ":", "earlier_unitdef", "}", ")", "else", ":", "earlier_unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "earlier_spec", ",", "earlier_unit", ")", "if", "earlier_unitdef", "is", "None", ":", "earlier_unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "earlier_spec", "+", "_BLOCKED", ",", "earlier_unit", ")", "if", "earlier_unitdef", "is", "None", ":", "earlier_unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "earlier_spec", "+", "_FINISHED", ",", "earlier_unit", ")", "if", "earlier_unitdef", "is", "not", "None", ":", "earlier_done", "=", "True", "earlier_successful", "=", "True", "if", "earlier_unitdef", "is", "None", ":", "earlier_unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "earlier_spec", "+", "_FAILED", ",", "earlier_unit", ")", "if", "earlier_unitdef", "is", "not", "None", ":", "earlier_done", "=", "True", "later_failed", "=", "earlier_done", "and", "hard", "and", "not", "earlier_successful", "later_unblocked", "=", "(", "(", "earlier_done", "and", "not", "later_failed", ")", "or", "(", "earlier_unitdef", "is", "None", ")", ")", "if", "later_failed", ":", "later_destination", "=", "WORK_UNITS_", "+", "later_spec", "+", "_FAILED", "elif", "later_unblocked", ":", "later_destination", "=", "WORK_UNITS_", "+", "later_spec", "else", ":", "later_destination", "=", "WORK_UNITS_", "+", "later_spec", "+", "_BLOCKED", "if", "later_unitdef", "is", "not", "None", ":", "for", "suffix", "in", "[", "''", ",", "_FINISHED", ",", "_FAILED", ",", "_BLOCKED", "]", ":", "k", "=", "WORK_UNITS_", "+", "later_spec", "+", "suffix", "if", "k", "!=", "later_destination", ":", "session", ".", "popmany", "(", "k", ",", "later_unit", ")", "session", ".", "update", "(", "later_destination", ",", "{", "later_unit", ":", "later_unitdef", "}", ")", "elif", "earlier_unitdef", "is", "not", "None", ":", "later_unitdef", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "later_spec", ",", "later_unit", ")", "if", "later_unitdef", "is", "not", "None", ":", "session", ".", "move", "(", "WORK_UNITS_", "+", "later_spec", ",", "WORK_UNITS_", "+", "later_spec", "+", "_BLOCKED", ",", "{", "later_unit", ":", "later_unitdef", "}", ")", "else", ":", "later_unitdef", "=", "session", ".", 
"get", "(", "WORK_UNITS_", "+", "later_spec", "+", "_BLOCKED", ",", "later_unit", ")", "if", "later_unitdef", "is", "None", "or", "earlier_unitdef", "is", "None", ":", "return", "False", "# Now both units exist and are in the right place;", "# record the dependency", "blocks", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "earlier_spec", "+", "_BLOCKS", ",", "earlier_unit", ")", "if", "blocks", "is", "None", ":", "blocks", "=", "[", "]", "blocks", ".", "append", "(", "[", "later_spec", ",", "later_unit", ",", "hard", "]", ")", "session", ".", "set", "(", "WORK_UNITS_", "+", "earlier_spec", "+", "_BLOCKS", ",", "earlier_unit", ",", "blocks", ")", "depends", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "later_spec", "+", "_DEPENDS", ",", "later_unit", ")", "if", "depends", "is", "None", ":", "depends", "=", "[", "]", "depends", ".", "append", "(", "[", "earlier_spec", ",", "earlier_unit", "]", ")", "session", ".", "set", "(", "WORK_UNITS_", "+", "later_spec", "+", "_DEPENDS", ",", "later_unit", ",", "depends", ")", "return", "True" ]
Add work units, where one prevents execution of the other. The two work units may be attached to different work specs, but both must be in this task master's namespace. `work_unit` and `depends_on` are both tuples of (work spec name, work unit name, work unit dictionary). The work specs must already exist; they may be created with :meth:`update_bundle` with an empty work unit dictionary. If a work unit dictionary is provided with either work unit, then this defines that work unit, and any existing definition is replaced. Either or both work unit dictionaries may be :const:`None`, in which case the work unit is not created if it does not already exist. In this last case, the other work unit will be added if specified, but the dependency will not be added, and this function will return :const:`False`. In all other cases, this dependency is added in addition to all existing dependencies on either or both work units, even if the work unit dictionary is replaced. `work_unit` will not be executed or reported as available via :meth:`get_work` until `depends_on` finishes execution. If the `depends_on` task fails, then the `hard` parameter describes what happens: if `hard` is :const:`True` then `work_unit` will also fail, but if `hard` is :const:`False` then `work_unit` will be able to execute even if `depends_on` fails, it just must have completed some execution attempt. Calling this function with ``hard=True`` suggests an ordered sequence of tasks where the later task depends on the output of the earlier tasks. Calling this function with ``hard=False`` suggests a cleanup task that must run after this task (and, likely, several others) are done, but doesn't specifically depend on its result being available. :param work_unit: "Later" work unit to execute :paramtype work_unit: tuple of (str,str,dict) :param depends_on: "Earlier" work unit to execute :paramtype depends_on: tuple of (str,str,dict) :param bool hard: if True, then `work_unit` automatically fails if `depends_on` fails :return: :const:`True`, unless one or both of the work units didn't exist and weren't specified, in which case, :const:`False` :raise rejester.exceptions.NoSuchWorkSpecError: if a work spec was named that doesn't exist
[ "Add", "work", "units", "where", "one", "prevents", "execution", "of", "the", "other", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1423-L1557
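The docstring above pins down the call shape (two (spec, unit, dict) tuples plus `hard`), so a brief usage sketch may help; the spec and unit names, the `tm` instance, and its construction are illustrative assumptions, not taken from the record.

# Hedged usage sketch for add_dependent_work_units. Assumes a configured
# rejester TaskMaster instance `tm` (construction elided) whose work specs
# 'transform' and 'cleanup' already exist, e.g. via update_bundle().
# from rejester._task_master import TaskMaster   # module path per this record
later = ('cleanup', 'purge_tmp', {'path': '/tmp/job-1'})        # runs last
earlier = ('transform', 'job-1', {'input': 'data/job-1.csv'})   # runs first

# hard=False: the cleanup may run once the transform has attempted
# execution, even if that attempt failed.
ok = tm.add_dependent_work_units(later, earlier, hard=False)
if not ok:
    print('dependency not recorded: a unit was missing and not specified')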
243,172
diffeo/rejester
rejester/_task_master.py
TaskMaster.nice
def nice(self, work_spec_name, nice): '''Change the priority of an existing work spec.''' with self.registry.lock(identifier=self.worker_id) as session: session.update(NICE_LEVELS, dict(work_spec_name=nice))
python
def nice(self, work_spec_name, nice): '''Change the priority of an existing work spec.''' with self.registry.lock(identifier=self.worker_id) as session: session.update(NICE_LEVELS, dict(work_spec_name=nice))
[ "def", "nice", "(", "self", ",", "work_spec_name", ",", "nice", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "session", ".", "update", "(", "NICE_LEVELS", ",", "dict", "(", "work_spec_name", "=", "nice", ")", ")" ]
Change the priority of an existing work spec.
[ "Change", "the", "priority", "of", "an", "existing", "work", "spec", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1596-L1599
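Worth noting when reading this record: `dict(work_spec_name=nice)` builds `{'work_spec_name': nice}`, i.e. the literal string key rather than the name passed in, so every call updates the same entry. A hedged sketch of what the update presumably intends (my reading, not a fix taken from the repository):

def nice(self, work_spec_name, nice):
    '''Change the priority of an existing work spec.'''
    with self.registry.lock(identifier=self.worker_id) as session:
        # key NICE_LEVELS by the actual spec name rather than the
        # literal string 'work_spec_name'
        session.update(NICE_LEVELS, {work_spec_name: nice})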
243,173
diffeo/rejester
rejester/_task_master.py
TaskMaster.get_assigned_work_unit
def get_assigned_work_unit(self, worker_id, work_spec_name, work_unit_key): '''get a specific WorkUnit that has already been assigned to a particular worker_id ''' with self.registry.lock(identifier=self.worker_id) as session: assigned_work_unit_key = session.get( WORK_UNITS_ + work_spec_name + '_locks', worker_id) if not assigned_work_unit_key == work_unit_key: # raise LostLease instead of EnvironmentError, so # users of TaskMaster can have a single type of # expected exception, rather than two raise LostLease( 'assigned_work_unit_key=%r != %r' % (assigned_work_unit_key, work_unit_key)) # could trap EnvironmentError and raise LostLease instead work_unit_data = session.get(WORK_UNITS_ + work_spec_name, work_unit_key) return WorkUnit( self.registry, work_spec_name, work_unit_key, work_unit_data, worker_id=worker_id, default_lifetime=self.default_lifetime, )
python
def get_assigned_work_unit(self, worker_id, work_spec_name, work_unit_key): '''get a specific WorkUnit that has already been assigned to a particular worker_id ''' with self.registry.lock(identifier=self.worker_id) as session: assigned_work_unit_key = session.get( WORK_UNITS_ + work_spec_name + '_locks', worker_id) if not assigned_work_unit_key == work_unit_key: # raise LostLease instead of EnvironmentError, so # users of TaskMaster can have a single type of # expected exception, rather than two raise LostLease( 'assigned_work_unit_key=%r != %r' % (assigned_work_unit_key, work_unit_key)) # could trap EnvironmentError and raise LostLease instead work_unit_data = session.get(WORK_UNITS_ + work_spec_name, work_unit_key) return WorkUnit( self.registry, work_spec_name, work_unit_key, work_unit_data, worker_id=worker_id, default_lifetime=self.default_lifetime, )
[ "def", "get_assigned_work_unit", "(", "self", ",", "worker_id", ",", "work_spec_name", ",", "work_unit_key", ")", ":", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "self", ".", "worker_id", ")", "as", "session", ":", "assigned_work_unit_key", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "'_locks'", ",", "worker_id", ")", "if", "not", "assigned_work_unit_key", "==", "work_unit_key", ":", "# raise LostLease instead of EnvironmentError, so", "# users of TaskMaster can have a single type of", "# expected exception, rather than two", "raise", "LostLease", "(", "'assigned_work_unit_key=%r != %r'", "%", "(", "assigned_work_unit_key", ",", "work_unit_key", ")", ")", "# could trap EnvironmentError and raise LostLease instead", "work_unit_data", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "work_unit_key", ")", "return", "WorkUnit", "(", "self", ".", "registry", ",", "work_spec_name", ",", "work_unit_key", ",", "work_unit_data", ",", "worker_id", "=", "worker_id", ",", "default_lifetime", "=", "self", ".", "default_lifetime", ",", ")" ]
get a specific WorkUnit that has already been assigned to a particular worker_id
[ "get", "a", "specific", "WorkUnit", "that", "has", "already", "been", "assigned", "to", "a", "particular", "worker_id" ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1695-L1718
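A short usage sketch for re-fetching an already-assigned unit; `tm`, the worker id, and the names are assumptions, and the exception is assumed to live in rejester.exceptions alongside NoSuchWorkSpecError.

# Hedged usage sketch; identifiers are illustrative only.
from rejester.exceptions import LostLease   # assumed location of LostLease

try:
    unit = tm.get_assigned_work_unit('worker-1234', 'transform', 'job-1')
except LostLease:
    # this worker no longer holds 'job-1'; abandon the in-progress work
    unit = None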
243,174
diffeo/rejester
rejester/_task_master.py
TaskMaster.get_child_work_units
def get_child_work_units(self, worker_id): '''Get work units assigned to a worker's children. Returns a dictionary mapping worker ID to :class:`WorkUnit`. If a child exists but is idle, that worker ID will map to :const:`None`. The work unit may already be expired or assigned to a different worker; this will be reflected in the returned :class:`WorkUnit`. This may write back to the underlying data store to clean up stale children that have not unregistered themselves but no longer exist in any form. ''' result = {} with self.registry.lock(identifier=worker_id) as session: all_children = session.pull(WORKER_CHILDREN_ + worker_id) # The data stored in Redis isn't actually conducive to # this specific query; we will need to scan each work spec # for each work unit work_specs = session.pull(WORK_SPECS) for child in all_children.iterkeys(): work_spec_name = None for spec in work_specs.iterkeys(): work_unit_key = session.get( WORK_UNITS_ + spec + '_locks', child) if work_unit_key: work_spec_name = spec break if work_spec_name: assigned = session.get( WORK_UNITS_ + work_spec_name + '_locks', work_unit_key) (data, expires) = session.get( WORK_UNITS_ + work_spec_name, work_unit_key, include_priority=True) if data is None: # The work unit is probably already finished result[child] = None else: result[child] = WorkUnit( self.registry, work_spec_name, work_unit_key, data, expires=expires, worker_id=assigned) else: # The child isn't doing anything. Does it still # exist? heartbeat = session.get(WORKER_OBSERVED_MODE, child) if heartbeat: result[child] = None else: session.popmany(WORKER_CHILDREN_ + worker_id, child) return result
python
def get_child_work_units(self, worker_id): '''Get work units assigned to a worker's children. Returns a dictionary mapping worker ID to :class:`WorkUnit`. If a child exists but is idle, that worker ID will map to :const:`None`. The work unit may already be expired or assigned to a different worker; this will be reflected in the returned :class:`WorkUnit`. This may write back to the underlying data store to clean up stale children that have not unregistered themselves but no longer exist in any form. ''' result = {} with self.registry.lock(identifier=worker_id) as session: all_children = session.pull(WORKER_CHILDREN_ + worker_id) # The data stored in Redis isn't actually conducive to # this specific query; we will need to scan each work spec # for each work unit work_specs = session.pull(WORK_SPECS) for child in all_children.iterkeys(): work_spec_name = None for spec in work_specs.iterkeys(): work_unit_key = session.get( WORK_UNITS_ + spec + '_locks', child) if work_unit_key: work_spec_name = spec break if work_spec_name: assigned = session.get( WORK_UNITS_ + work_spec_name + '_locks', work_unit_key) (data, expires) = session.get( WORK_UNITS_ + work_spec_name, work_unit_key, include_priority=True) if data is None: # The work unit is probably already finished result[child] = None else: result[child] = WorkUnit( self.registry, work_spec_name, work_unit_key, data, expires=expires, worker_id=assigned) else: # The child isn't doing anything. Does it still # exist? heartbeat = session.get(WORKER_OBSERVED_MODE, child) if heartbeat: result[child] = None else: session.popmany(WORKER_CHILDREN_ + worker_id, child) return result
[ "def", "get_child_work_units", "(", "self", ",", "worker_id", ")", ":", "result", "=", "{", "}", "with", "self", ".", "registry", ".", "lock", "(", "identifier", "=", "worker_id", ")", "as", "session", ":", "all_children", "=", "session", ".", "pull", "(", "WORKER_CHILDREN_", "+", "worker_id", ")", "# The data stored in Redis isn't actually conducive to", "# this specific query; we will need to scan each work spec", "# for each work unit", "work_specs", "=", "session", ".", "pull", "(", "WORK_SPECS", ")", "for", "child", "in", "all_children", ".", "iterkeys", "(", ")", ":", "work_spec_name", "=", "None", "for", "spec", "in", "work_specs", ".", "iterkeys", "(", ")", ":", "work_unit_key", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "spec", "+", "'_locks'", ",", "child", ")", "if", "work_unit_key", ":", "work_spec_name", "=", "spec", "break", "if", "work_spec_name", ":", "assigned", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", "+", "'_locks'", ",", "work_unit_key", ")", "(", "data", ",", "expires", ")", "=", "session", ".", "get", "(", "WORK_UNITS_", "+", "work_spec_name", ",", "work_unit_key", ",", "include_priority", "=", "True", ")", "if", "data", "is", "None", ":", "# The work unit is probably already finished", "result", "[", "child", "]", "=", "None", "else", ":", "result", "[", "child", "]", "=", "WorkUnit", "(", "self", ".", "registry", ",", "work_spec_name", ",", "work_unit_key", ",", "data", ",", "expires", "=", "expires", ",", "worker_id", "=", "assigned", ")", "else", ":", "# The child isn't doing anything. Does it still", "# exist?", "heartbeat", "=", "session", ".", "get", "(", "WORKER_OBSERVED_MODE", ",", "child", ")", "if", "heartbeat", ":", "result", "[", "child", "]", "=", "None", "else", ":", "session", ".", "popmany", "(", "WORKER_CHILDREN_", "+", "worker_id", ",", "child", ")", "return", "result" ]
Get work units assigned to a worker's children. Returns a dictionary mapping worker ID to :class:`WorkUnit`. If a child exists but is idle, that worker ID will map to :const:`None`. The work unit may already be expired or assigned to a different worker; this will be reflected in the returned :class:`WorkUnit`. This may write back to the underlying data store to clean up stale children that have not unregistered themselves but no longer exist in any form.
[ "Get", "work", "units", "assigned", "to", "a", "worker", "s", "children", "." ]
5438a4a18be2801d7826c46e2079ba9639d2ecb4
https://github.com/diffeo/rejester/blob/5438a4a18be2801d7826c46e2079ba9639d2ecb4/rejester/_task_master.py#L1720-L1773
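A sketch of how the returned mapping can be consumed; the attribute name on the returned WorkUnit is inferred from the constructor arguments above and is therefore an assumption.

# Hedged sketch: report what each child of a parent worker is doing.
for child_id, unit in tm.get_child_work_units('parent-worker-id').items():
    if unit is None:
        print(child_id, 'is idle (or its unit already finished)')
    else:
        # work_spec_name is assumed from the WorkUnit(...) call in the record
        print(child_id, 'is working in spec', unit.work_spec_name)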
243,175
scivision/histutils
histutils/plotsimul.py
plotPlainImg
def plotPlainImg(sim, cam, rawdata, t, odir): """ No subplots, just a plan http://stackoverflow.com/questions/22408237/named-colors-in-matplotlib """ for R, C in zip(rawdata, cam): fg = figure() ax = fg.gca() ax.set_axis_off() # no ticks ax.imshow(R[t, :, :], origin='lower', vmin=max(C.clim[0], 1), vmax=C.clim[1], cmap='gray') ax.text(0.05, 0.075, datetime.utcfromtimestamp(C.tKeo[t]).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3], ha='left', va='top', transform=ax.transAxes, color='limegreen', # weight='bold', size=24 ) writeplots(fg, 'cam{}rawFrame'.format(C.name), t, odir)
python
def plotPlainImg(sim, cam, rawdata, t, odir): """ No subplots, just a plan http://stackoverflow.com/questions/22408237/named-colors-in-matplotlib """ for R, C in zip(rawdata, cam): fg = figure() ax = fg.gca() ax.set_axis_off() # no ticks ax.imshow(R[t, :, :], origin='lower', vmin=max(C.clim[0], 1), vmax=C.clim[1], cmap='gray') ax.text(0.05, 0.075, datetime.utcfromtimestamp(C.tKeo[t]).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3], ha='left', va='top', transform=ax.transAxes, color='limegreen', # weight='bold', size=24 ) writeplots(fg, 'cam{}rawFrame'.format(C.name), t, odir)
[ "def", "plotPlainImg", "(", "sim", ",", "cam", ",", "rawdata", ",", "t", ",", "odir", ")", ":", "for", "R", ",", "C", "in", "zip", "(", "rawdata", ",", "cam", ")", ":", "fg", "=", "figure", "(", ")", "ax", "=", "fg", ".", "gca", "(", ")", "ax", ".", "set_axis_off", "(", ")", "# no ticks", "ax", ".", "imshow", "(", "R", "[", "t", ",", ":", ",", ":", "]", ",", "origin", "=", "'lower'", ",", "vmin", "=", "max", "(", "C", ".", "clim", "[", "0", "]", ",", "1", ")", ",", "vmax", "=", "C", ".", "clim", "[", "1", "]", ",", "cmap", "=", "'gray'", ")", "ax", ".", "text", "(", "0.05", ",", "0.075", ",", "datetime", ".", "utcfromtimestamp", "(", "C", ".", "tKeo", "[", "t", "]", ")", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%S.%f'", ")", "[", ":", "-", "3", "]", ",", "ha", "=", "'left'", ",", "va", "=", "'top'", ",", "transform", "=", "ax", ".", "transAxes", ",", "color", "=", "'limegreen'", ",", "# weight='bold',", "size", "=", "24", ")", "writeplots", "(", "fg", ",", "'cam{}rawFrame'", ".", "format", "(", "C", ".", "name", ")", ",", "t", ",", "odir", ")" ]
No subplots, just a plan http://stackoverflow.com/questions/22408237/named-colors-in-matplotlib
[ "No", "subplots", "just", "a", "plan" ]
859a91d3894cb57faed34881c6ea16130b90571e
https://github.com/scivision/histutils/blob/859a91d3894cb57faed34881c6ea16130b90571e/histutils/plotsimul.py#L23-L46
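The plotting recipe here (frameless axes, grayscale imshow, a timestamp drawn in axes coordinates) is easy to reproduce with plain matplotlib; the sketch below uses synthetic data and fixed intensity limits rather than the sim/cam objects.

# Self-contained sketch of the same plotting idea; data and limits are synthetic.
from datetime import datetime, timezone
import numpy as np
import matplotlib.pyplot as plt

frame = np.random.rand(128, 128)            # stand-in for R[t, :, :]
fig, ax = plt.subplots()
ax.set_axis_off()                           # no ticks, as in the record
ax.imshow(frame, origin='lower', vmin=0.0, vmax=1.0, cmap='gray')
ax.text(0.05, 0.075,
        datetime.now(timezone.utc).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3],
        ha='left', va='top', transform=ax.transAxes, color='limegreen', size=24)
fig.savefig('cam0rawFrame.png')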
243,176
arcus-io/puppetdb-python
puppetdb/v2/nodes.py
get_nodes
def get_nodes(api_url=None, verify=False, cert=list()): """ Returns info for all Nodes :param api_url: Base PuppetDB API url """ return utils._make_api_request(api_url, '/nodes', verify, cert)
python
def get_nodes(api_url=None, verify=False, cert=list()): """ Returns info for all Nodes :param api_url: Base PuppetDB API url """ return utils._make_api_request(api_url, '/nodes', verify, cert)
[ "def", "get_nodes", "(", "api_url", "=", "None", ",", "verify", "=", "False", ",", "cert", "=", "list", "(", ")", ")", ":", "return", "utils", ".", "_make_api_request", "(", "api_url", ",", "'/nodes'", ",", "verify", ",", "cert", ")" ]
Returns info for all Nodes :param api_url: Base PuppetDB API url
[ "Returns", "info", "for", "all", "Nodes" ]
d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2
https://github.com/arcus-io/puppetdb-python/blob/d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2/puppetdb/v2/nodes.py#L25-L32
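This wrapper and the three node wrappers in the following records all hand a path to `utils._make_api_request`; that helper is not part of these records, so the sketch below is only an assumption about its likely shape (a verified GET returning decoded JSON).

# Hedged sketch of a _make_api_request-style helper; signature and behaviour
# are assumptions, since the real helper is not included in these records.
import requests

def make_api_request(api_url, path, verify=False, cert=()):
    """GET api_url + path and return the decoded JSON body."""
    resp = requests.get(api_url + path, verify=verify, cert=tuple(cert) or None)
    resp.raise_for_status()
    return resp.json()

# Mirrors the wrappers in these records (base URL is a placeholder):
# make_api_request('https://puppetdb.example:8081/v2', '/nodes')
# make_api_request('https://puppetdb.example:8081/v2', '/nodes/{0}'.format('web01'))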
243,177
arcus-io/puppetdb-python
puppetdb/v2/nodes.py
get_node
def get_node(api_url=None, node_name=None, verify=False, cert=list()): """ Returns info for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node """ return utils._make_api_request(api_url, '/nodes/{0}'.format(node_name), verify, cert)
python
def get_node(api_url=None, node_name=None, verify=False, cert=list()): """ Returns info for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node """ return utils._make_api_request(api_url, '/nodes/{0}'.format(node_name), verify, cert)
[ "def", "get_node", "(", "api_url", "=", "None", ",", "node_name", "=", "None", ",", "verify", "=", "False", ",", "cert", "=", "list", "(", ")", ")", ":", "return", "utils", ".", "_make_api_request", "(", "api_url", ",", "'/nodes/{0}'", ".", "format", "(", "node_name", ")", ",", "verify", ",", "cert", ")" ]
Returns info for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node
[ "Returns", "info", "for", "a", "Node" ]
d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2
https://github.com/arcus-io/puppetdb-python/blob/d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2/puppetdb/v2/nodes.py#L34-L42
243,178
arcus-io/puppetdb-python
puppetdb/v2/nodes.py
get_node_fact_by_name
def get_node_fact_by_name(api_url=None, node_name=None, fact_name=None, verify=False, cert=list()): """ Returns specified fact for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param fact_name: Name of fact """ return utils._make_api_request(api_url, '/nodes/{0}/facts/{1}'.format(node_name, fact_name), verify, cert)
python
def get_node_fact_by_name(api_url=None, node_name=None, fact_name=None, verify=False, cert=list()): """ Returns specified fact for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param fact_name: Name of fact """ return utils._make_api_request(api_url, '/nodes/{0}/facts/{1}'.format(node_name, fact_name), verify, cert)
[ "def", "get_node_fact_by_name", "(", "api_url", "=", "None", ",", "node_name", "=", "None", ",", "fact_name", "=", "None", ",", "verify", "=", "False", ",", "cert", "=", "list", "(", ")", ")", ":", "return", "utils", ".", "_make_api_request", "(", "api_url", ",", "'/nodes/{0}/facts/{1}'", ".", "format", "(", "node_name", ",", "fact_name", ")", ",", "verify", ",", "cert", ")" ]
Returns specified fact for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param fact_name: Name of fact
[ "Returns", "specified", "fact", "for", "a", "Node" ]
d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2
https://github.com/arcus-io/puppetdb-python/blob/d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2/puppetdb/v2/nodes.py#L54-L64
243,179
arcus-io/puppetdb-python
puppetdb/v2/nodes.py
get_node_resource_by_type
def get_node_resource_by_type(api_url=None, node_name=None, type_name=None, verify=False, cert=list()): """ Returns specified resource for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param type_name: Type of resource """ return utils._make_api_request(api_url, '/nodes/{0}/resources/{1}'.format(node_name, type_name), verify, cert)
python
def get_node_resource_by_type(api_url=None, node_name=None, type_name=None, verify=False, cert=list()): """ Returns specified resource for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param type_name: Type of resource """ return utils._make_api_request(api_url, '/nodes/{0}/resources/{1}'.format(node_name, type_name), verify, cert)
[ "def", "get_node_resource_by_type", "(", "api_url", "=", "None", ",", "node_name", "=", "None", ",", "type_name", "=", "None", ",", "verify", "=", "False", ",", "cert", "=", "list", "(", ")", ")", ":", "return", "utils", ".", "_make_api_request", "(", "api_url", ",", "'/nodes/{0}/resources/{1}'", ".", "format", "(", "node_name", ",", "type_name", ")", ",", "verify", ",", "cert", ")" ]
Returns specified resource for a Node :param api_url: Base PuppetDB API url :param node_name: Name of node :param type_name: Type of resource
[ "Returns", "specified", "resource", "for", "a", "Node" ]
d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2
https://github.com/arcus-io/puppetdb-python/blob/d772eb80a1dfb1154a1f421c7ecdc1ac951b5ea2/puppetdb/v2/nodes.py#L76-L87
243,180
EnigmaBridge/client.py
ebclient/eb_request.py
RequestCall.call
def call(self, request=None, *args, **kwargs): """ Calls multiple time - with retry. :param request: :return: response """ if request is not None: self.request = request retry = self.request.configuration.retry if not isinstance(retry, SimpleRetry): raise Error('Currently only the fast retry strategy is supported') last_exception = None for i in range(0, retry.max_retry): try: if i > 0: retry.sleep_jitter() self.call_once() return self.response except Exception as ex: last_exception = RequestFailed(message='Request failed', cause=ex) logger.debug("Request %d failed, exception: %s" % (i, ex)) # Last exception - throw it here to have a stack if i+1 == retry.max_retry: raise last_exception raise last_exception
python
def call(self, request=None, *args, **kwargs): """ Calls multiple time - with retry. :param request: :return: response """ if request is not None: self.request = request retry = self.request.configuration.retry if not isinstance(retry, SimpleRetry): raise Error('Currently only the fast retry strategy is supported') last_exception = None for i in range(0, retry.max_retry): try: if i > 0: retry.sleep_jitter() self.call_once() return self.response except Exception as ex: last_exception = RequestFailed(message='Request failed', cause=ex) logger.debug("Request %d failed, exception: %s" % (i, ex)) # Last exception - throw it here to have a stack if i+1 == retry.max_retry: raise last_exception raise last_exception
[ "def", "call", "(", "self", ",", "request", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", "is", "not", "None", ":", "self", ".", "request", "=", "request", "retry", "=", "self", ".", "request", ".", "configuration", ".", "retry", "if", "not", "isinstance", "(", "retry", ",", "SimpleRetry", ")", ":", "raise", "Error", "(", "'Currently only the fast retry strategy is supported'", ")", "last_exception", "=", "None", "for", "i", "in", "range", "(", "0", ",", "retry", ".", "max_retry", ")", ":", "try", ":", "if", "i", ">", "0", ":", "retry", ".", "sleep_jitter", "(", ")", "self", ".", "call_once", "(", ")", "return", "self", ".", "response", "except", "Exception", "as", "ex", ":", "last_exception", "=", "RequestFailed", "(", "message", "=", "'Request failed'", ",", "cause", "=", "ex", ")", "logger", ".", "debug", "(", "\"Request %d failed, exception: %s\"", "%", "(", "i", ",", "ex", ")", ")", "# Last exception - throw it here to have a stack", "if", "i", "+", "1", "==", "retry", ".", "max_retry", ":", "raise", "last_exception", "raise", "last_exception" ]
Calls multiple time - with retry. :param request: :return: response
[ "Calls", "multiple", "time", "-", "with", "retry", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_request.py#L68-L99
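The retry loop above is generic; here is a self-contained sketch of the same pattern with the SimpleRetry strategy replaced by a plain attempt count and jittered sleep (numbers are illustrative, not ebclient defaults).

import random
import time

def call_with_retry(do_call, max_retry=3, base_sleep=0.5):
    """Run do_call() up to max_retry times, sleeping with jitter between tries."""
    last_exception = None
    for attempt in range(max_retry):
        try:
            if attempt > 0:
                time.sleep(base_sleep + random.uniform(0, base_sleep))
            return do_call()
        except Exception as ex:
            last_exception = ex
    raise last_exception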
243,181
EnigmaBridge/client.py
ebclient/eb_request.py
RequestCall.field_to_long
def field_to_long(value): """ Converts given value to long if possible, otherwise None is returned. :param value: :return: """ if isinstance(value, (int, long)): return long(value) elif isinstance(value, basestring): return bytes_to_long(from_hex(value)) else: return None
python
def field_to_long(value): """ Converts given value to long if possible, otherwise None is returned. :param value: :return: """ if isinstance(value, (int, long)): return long(value) elif isinstance(value, basestring): return bytes_to_long(from_hex(value)) else: return None
[ "def", "field_to_long", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "int", ",", "long", ")", ")", ":", "return", "long", "(", "value", ")", "elif", "isinstance", "(", "value", ",", "basestring", ")", ":", "return", "bytes_to_long", "(", "from_hex", "(", "value", ")", ")", "else", ":", "return", "None" ]
Converts given value to long if possible, otherwise None is returned. :param value: :return:
[ "Converts", "given", "value", "to", "long", "if", "possible", "otherwise", "None", "is", "returned", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_request.py#L102-L114
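The record is Python 2 (`long`, `basestring`, pycrypto's bytes_to_long); below is a hedged Python 3 equivalent, assuming `from_hex` simply decodes a hex string to big-endian bytes.

# Python 3 sketch of field_to_long; the from_hex/bytes_to_long pair is
# assumed to amount to big-endian hex decoding.
def field_to_int(value):
    """Convert an int or hex string to int, otherwise return None."""
    if isinstance(value, int):
        return value
    if isinstance(value, str):
        return int.from_bytes(bytes.fromhex(value), 'big')
    return None

assert field_to_int(0x9000) == 0x9000
assert field_to_int('9000') == 0x9000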
243,182
EnigmaBridge/client.py
ebclient/eb_request.py
RequestCall.call_once
def call_once(self, request=None, *args, **kwargs): """ Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response """ if request is not None: self.request = request config = self.request.configuration if config.http_method != EBConsts.HTTP_METHOD_POST or config.method != EBConsts.METHOD_REST: raise Error('Not implemented yet, only REST POST method is allowed') url = self.request.url if self.request.url is not None else self.build_url() logger.debug("URL to call: %s", url) # Do the request resp = requests.post(url, json=self.request.body, timeout=config.timeout, headers=self.request.headers) self.last_resp = resp return self.check_response(resp)
python
def call_once(self, request=None, *args, **kwargs): """ Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response """ if request is not None: self.request = request config = self.request.configuration if config.http_method != EBConsts.HTTP_METHOD_POST or config.method != EBConsts.METHOD_REST: raise Error('Not implemented yet, only REST POST method is allowed') url = self.request.url if self.request.url is not None else self.build_url() logger.debug("URL to call: %s", url) # Do the request resp = requests.post(url, json=self.request.body, timeout=config.timeout, headers=self.request.headers) self.last_resp = resp return self.check_response(resp)
[ "def", "call_once", "(", "self", ",", "request", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "request", "is", "not", "None", ":", "self", ".", "request", "=", "request", "config", "=", "self", ".", "request", ".", "configuration", "if", "config", ".", "http_method", "!=", "EBConsts", ".", "HTTP_METHOD_POST", "or", "config", ".", "method", "!=", "EBConsts", ".", "METHOD_REST", ":", "raise", "Error", "(", "'Not implemented yet, only REST POST method is allowed'", ")", "url", "=", "self", ".", "request", ".", "url", "if", "self", ".", "request", ".", "url", "is", "not", "None", "else", "self", ".", "build_url", "(", ")", "logger", ".", "debug", "(", "\"URL to call: %s\"", ",", "url", ")", "# Do the request", "resp", "=", "requests", ".", "post", "(", "url", ",", "json", "=", "self", ".", "request", ".", "body", ",", "timeout", "=", "config", ".", "timeout", ",", "headers", "=", "self", ".", "request", ".", "headers", ")", "self", ".", "last_resp", "=", "resp", "return", "self", ".", "check_response", "(", "resp", ")" ]
Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response
[ "Performs", "one", "API", "request", ".", "Raises", "exception", "on", "failure", "." ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_request.py#L149-L172
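Stripped of the configuration checks, the request itself is a single requests.post; a minimal sketch with placeholder arguments:

import requests

def post_once(url, body, headers=None, timeout=90):
    """One REST POST mirroring call_once (retry is handled by the caller)."""
    # json= serialises the body; timeout and headers pass straight through
    return requests.post(url, json=body, timeout=timeout, headers=headers)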
243,183
EnigmaBridge/client.py
ebclient/eb_request.py
RequestCall.check_response
def check_response(self, resp): """ Checks response after request was made. Checks status of the response, mainly :param resp: :return: """ # For successful API call, response code will be 200 (OK) if resp.ok: json = resp.json() self.response = ResponseHolder() self.response.response = json # Check the code if 'status' not in json: raise InvalidResponse('No status field') self.response.status = self.field_to_long(json['status']) if self.response.status != EBConsts.STATUS_OK: txt_status = self.get_text_status(json) raise InvalidStatus('Status is %s (%04X)' % (txt_status if txt_status is not None else "", self.response.status)) if self.response_checker is not None: self.response_checker(self.response) return self.response else: # If response code is not ok (200), print the resulting http error code with description resp.raise_for_status() pass
python
def check_response(self, resp): """ Checks response after request was made. Checks status of the response, mainly :param resp: :return: """ # For successful API call, response code will be 200 (OK) if resp.ok: json = resp.json() self.response = ResponseHolder() self.response.response = json # Check the code if 'status' not in json: raise InvalidResponse('No status field') self.response.status = self.field_to_long(json['status']) if self.response.status != EBConsts.STATUS_OK: txt_status = self.get_text_status(json) raise InvalidStatus('Status is %s (%04X)' % (txt_status if txt_status is not None else "", self.response.status)) if self.response_checker is not None: self.response_checker(self.response) return self.response else: # If response code is not ok (200), print the resulting http error code with description resp.raise_for_status() pass
[ "def", "check_response", "(", "self", ",", "resp", ")", ":", "# For successful API call, response code will be 200 (OK)", "if", "resp", ".", "ok", ":", "json", "=", "resp", ".", "json", "(", ")", "self", ".", "response", "=", "ResponseHolder", "(", ")", "self", ".", "response", ".", "response", "=", "json", "# Check the code", "if", "'status'", "not", "in", "json", ":", "raise", "InvalidResponse", "(", "'No status field'", ")", "self", ".", "response", ".", "status", "=", "self", ".", "field_to_long", "(", "json", "[", "'status'", "]", ")", "if", "self", ".", "response", ".", "status", "!=", "EBConsts", ".", "STATUS_OK", ":", "txt_status", "=", "self", ".", "get_text_status", "(", "json", ")", "raise", "InvalidStatus", "(", "'Status is %s (%04X)'", "%", "(", "txt_status", "if", "txt_status", "is", "not", "None", "else", "\"\"", ",", "self", ".", "response", ".", "status", ")", ")", "if", "self", ".", "response_checker", "is", "not", "None", ":", "self", ".", "response_checker", "(", "self", ".", "response", ")", "return", "self", ".", "response", "else", ":", "# If response code is not ok (200), print the resulting http error code with description", "resp", ".", "raise_for_status", "(", ")", "pass" ]
Checks response after request was made. Checks status of the response, mainly :param resp: :return:
[ "Checks", "response", "after", "request", "was", "made", ".", "Checks", "status", "of", "the", "response", "mainly" ]
0fafe3902da394da88e9f960751d695ca65bbabd
https://github.com/EnigmaBridge/client.py/blob/0fafe3902da394da88e9f960751d695ca65bbabd/ebclient/eb_request.py#L174-L207
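The pattern here is: HTTP-level failures surface via raise_for_status(), application-level failures via a 'status' field in the JSON body. A simplified, self-contained sketch; STATUS_OK and the numeric handling are assumptions standing in for the EBConsts value and field_to_long.

STATUS_OK = 0x9000   # assumption; the real constant lives in EBConsts

def check_response(resp):
    if not resp.ok:
        resp.raise_for_status()              # HTTP-level failure
    body = resp.json()
    if 'status' not in body:
        raise ValueError('No status field')
    status = int(body['status'], 16) if isinstance(body['status'], str) else body['status']
    if status != STATUS_OK:
        raise ValueError('Status is %04X' % status)
    return body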
243,184
rjw57/throw
throw/minus_renderer.py
create_email
def create_email(filepaths, collection_name): """Create an email message object which implements the email.message.Message interface and which has the files to be shared uploaded to min.us and links placed in the message body. """ gallery = minus.CreateGallery() if collection_name is not None: gallery.SaveGallery(collection_name) interface = TerminalInterface() interface.new_section() interface.message(\ 'Uploading files to http://min.us/m%s...' % (gallery.reader_id,)) item_map = { } for path in filepaths: interface.message('Uploading %s...' % (os.path.basename(path),)) interface.start_progress() item = minus.UploadItem(path, gallery, os.path.basename(path), interface.update_progress) interface.end_progress() item_map[item.id] = os.path.basename(path) msg_str = '' msg_str += "I've shared some files with you. They are viewable as a " msg_str += "gallery at the following link:\n\n - http://min.us/m%s\n\n" %\ (gallery.reader_id,) msg_str += "The individual files can be downloaded from the following " msg_str += "links:\n\n" for item, name in item_map.items(): msg_str += ' - http://i.min.us/j%s%s %s\n' % \ (item, os.path.splitext(name)[1], name) msg = MIMEText(msg_str) msg.add_header('Format', 'Flowed') return msg
python
def create_email(filepaths, collection_name): """Create an email message object which implements the email.message.Message interface and which has the files to be shared uploaded to min.us and links placed in the message body. """ gallery = minus.CreateGallery() if collection_name is not None: gallery.SaveGallery(collection_name) interface = TerminalInterface() interface.new_section() interface.message(\ 'Uploading files to http://min.us/m%s...' % (gallery.reader_id,)) item_map = { } for path in filepaths: interface.message('Uploading %s...' % (os.path.basename(path),)) interface.start_progress() item = minus.UploadItem(path, gallery, os.path.basename(path), interface.update_progress) interface.end_progress() item_map[item.id] = os.path.basename(path) msg_str = '' msg_str += "I've shared some files with you. They are viewable as a " msg_str += "gallery at the following link:\n\n - http://min.us/m%s\n\n" %\ (gallery.reader_id,) msg_str += "The individual files can be downloaded from the following " msg_str += "links:\n\n" for item, name in item_map.items(): msg_str += ' - http://i.min.us/j%s%s %s\n' % \ (item, os.path.splitext(name)[1], name) msg = MIMEText(msg_str) msg.add_header('Format', 'Flowed') return msg
[ "def", "create_email", "(", "filepaths", ",", "collection_name", ")", ":", "gallery", "=", "minus", ".", "CreateGallery", "(", ")", "if", "collection_name", "is", "not", "None", ":", "gallery", ".", "SaveGallery", "(", "collection_name", ")", "interface", "=", "TerminalInterface", "(", ")", "interface", ".", "new_section", "(", ")", "interface", ".", "message", "(", "'Uploading files to http://min.us/m%s...'", "%", "(", "gallery", ".", "reader_id", ",", ")", ")", "item_map", "=", "{", "}", "for", "path", "in", "filepaths", ":", "interface", ".", "message", "(", "'Uploading %s...'", "%", "(", "os", ".", "path", ".", "basename", "(", "path", ")", ",", ")", ")", "interface", ".", "start_progress", "(", ")", "item", "=", "minus", ".", "UploadItem", "(", "path", ",", "gallery", ",", "os", ".", "path", ".", "basename", "(", "path", ")", ",", "interface", ".", "update_progress", ")", "interface", ".", "end_progress", "(", ")", "item_map", "[", "item", ".", "id", "]", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "msg_str", "=", "''", "msg_str", "+=", "\"I've shared some files with you. They are viewable as a \"", "msg_str", "+=", "\"gallery at the following link:\\n\\n - http://min.us/m%s\\n\\n\"", "%", "(", "gallery", ".", "reader_id", ",", ")", "msg_str", "+=", "\"The individual files can be downloaded from the following \"", "msg_str", "+=", "\"links:\\n\\n\"", "for", "item", ",", "name", "in", "item_map", ".", "items", "(", ")", ":", "msg_str", "+=", "' - http://i.min.us/j%s%s %s\\n'", "%", "(", "item", ",", "os", ".", "path", ".", "splitext", "(", "name", ")", "[", "1", "]", ",", "name", ")", "msg", "=", "MIMEText", "(", "msg_str", ")", "msg", ".", "add_header", "(", "'Format'", ",", "'Flowed'", ")", "return", "msg" ]
Create an email message object which implements the email.message.Message interface and which has the files to be shared uploaded to min.us and links placed in the message body.
[ "Create", "an", "email", "message", "object", "which", "implements", "the", "email", ".", "message", ".", "Message", "interface", "and", "which", "has", "the", "files", "to", "be", "shared", "uploaded", "to", "min", ".", "us", "and", "links", "placed", "in", "the", "message", "body", "." ]
74a7116362ba5b45635ab247472b25cfbdece4ee
https://github.com/rjw57/throw/blob/74a7116362ba5b45635ab247472b25cfbdece4ee/throw/minus_renderer.py#L7-L46
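The message-building half of create_email is plain stdlib email handling; a self-contained sketch with placeholder links instead of min.us uploads:

from email.mime.text import MIMEText

links = {'abc123': 'photo.jpg', 'def456': 'notes.txt'}   # placeholder items
body = "I've shared some files with you:\n\n"
for item_id, name in links.items():
    body += ' - http://example.invalid/%s %s\n' % (item_id, name)

msg = MIMEText(body)
msg.add_header('Format', 'Flowed')
print(msg.as_string())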
243,185
krukas/Trionyx
trionyx/trionyx/layouts.py
account_overview
def account_overview(object): """Create layout for user profile""" return Layout( Container( Row( Column2( Panel( 'Avatar', Img(src="{}{}".format(settings.MEDIA_URL, object.avatar)), collapse=True, ), ), Column10( Panel( 'Account information', DescriptionList( 'email', 'first_name', 'last_name', ), ) ), ) ) )
python
def account_overview(object): """Create layout for user profile""" return Layout( Container( Row( Column2( Panel( 'Avatar', Img(src="{}{}".format(settings.MEDIA_URL, object.avatar)), collapse=True, ), ), Column10( Panel( 'Account information', DescriptionList( 'email', 'first_name', 'last_name', ), ) ), ) ) )
[ "def", "account_overview", "(", "object", ")", ":", "return", "Layout", "(", "Container", "(", "Row", "(", "Column2", "(", "Panel", "(", "'Avatar'", ",", "Img", "(", "src", "=", "\"{}{}\"", ".", "format", "(", "settings", ".", "MEDIA_URL", ",", "object", ".", "avatar", ")", ")", ",", "collapse", "=", "True", ",", ")", ",", ")", ",", "Column10", "(", "Panel", "(", "'Account information'", ",", "DescriptionList", "(", "'email'", ",", "'first_name'", ",", "'last_name'", ",", ")", ",", ")", ")", ",", ")", ")", ")" ]
Create layout for user profile
[ "Create", "layout", "for", "user", "profile" ]
edac132cc0797190153f2e60bc7e88cb50e80da6
https://github.com/krukas/Trionyx/blob/edac132cc0797190153f2e60bc7e88cb50e80da6/trionyx/trionyx/layouts.py#L15-L39
243,186
zagfai/webtul
webtul/cache.py
Cache.add_exp_key
def add_exp_key(self, key, value, ex): "Expired in seconds" return self.c.set(key, value, ex)
python
def add_exp_key(self, key, value, ex): "Expired in seconds" return self.c.set(key, value, ex)
[ "def", "add_exp_key", "(", "self", ",", "key", ",", "value", ",", "ex", ")", ":", "return", "self", ".", "c", ".", "set", "(", "key", ",", "value", ",", "ex", ")" ]
Expired in seconds
[ "Expired", "in", "seconds" ]
58c49928070b56ef54a45b4af20d800b269ad8ce
https://github.com/zagfai/webtul/blob/58c49928070b56ef54a45b4af20d800b269ad8ce/webtul/cache.py#L50-L52
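The wrapped call is redis-py's SET with an expiry; a short usage sketch (host, key, and value are assumptions):

import redis

r = redis.Redis(host='localhost', port=6379)
r.set('session:42', 'payload', ex=30)   # key expires after 30 seconds
print(r.ttl('session:42'))              # remaining lifetime in seconds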
243,187
Vito2015/pyextend
pyextend/core/itertools.py
unpack
def unpack(iterable, count, fill=None): """ The iter data unpack function. Example 1: In[1]: source = 'abc' In[2]: a, b = safe_unpack(source, 2) In[3]: print(a, b) a b Example 2: In[1]: source = 'abc' In[2]: a, b, c, d = safe_unpack(source, 4) In[3]: print(a, b, c, d) a b None None """ iterable = list(enumerate(iterable)) cnt = count if count <= len(iterable) else len(iterable) results = [iterable[i][1] for i in range(cnt)] # results[len(results):len(results)] = [fill for i in range(count-cnt)] results = merge(results, [fill for i in range(count-cnt)]) return tuple(results)
python
def unpack(iterable, count, fill=None): """ The iter data unpack function. Example 1: In[1]: source = 'abc' In[2]: a, b = safe_unpack(source, 2) In[3]: print(a, b) a b Example 2: In[1]: source = 'abc' In[2]: a, b, c, d = safe_unpack(source, 4) In[3]: print(a, b, c, d) a b None None """ iterable = list(enumerate(iterable)) cnt = count if count <= len(iterable) else len(iterable) results = [iterable[i][1] for i in range(cnt)] # results[len(results):len(results)] = [fill for i in range(count-cnt)] results = merge(results, [fill for i in range(count-cnt)]) return tuple(results)
[ "def", "unpack", "(", "iterable", ",", "count", ",", "fill", "=", "None", ")", ":", "iterable", "=", "list", "(", "enumerate", "(", "iterable", ")", ")", "cnt", "=", "count", "if", "count", "<=", "len", "(", "iterable", ")", "else", "len", "(", "iterable", ")", "results", "=", "[", "iterable", "[", "i", "]", "[", "1", "]", "for", "i", "in", "range", "(", "cnt", ")", "]", "# results[len(results):len(results)] = [fill for i in range(count-cnt)]", "results", "=", "merge", "(", "results", ",", "[", "fill", "for", "i", "in", "range", "(", "count", "-", "cnt", ")", "]", ")", "return", "tuple", "(", "results", ")" ]
The iter data unpack function. Example 1: In[1]: source = 'abc' In[2]: a, b = safe_unpack(source, 2) In[3]: print(a, b) a b Example 2: In[1]: source = 'abc' In[2]: a, b, c, d = safe_unpack(source, 4) In[3]: print(a, b, c, d) a b None None
[ "The", "iter", "data", "unpack", "function", "." ]
36861dfe1087e437ffe9b5a1da9345c85b4fa4a1
https://github.com/Vito2015/pyextend/blob/36861dfe1087e437ffe9b5a1da9345c85b4fa4a1/pyextend/core/itertools.py#L19-L41
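The docstring's examples carry over directly; for comparison, the same padding behaviour can be had from itertools (my rewrite, not part of pyextend):

from itertools import chain, islice, repeat

def unpack_padded(iterable, count, fill=None):
    """Take count items, padding with fill when the iterable is shorter."""
    return tuple(islice(chain(iterable, repeat(fill)), count))

a, b, c, d = unpack_padded('abc', 4)
print(a, b, c, d)   # a b c None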
243,188
CitrineInformatics/dftparse
dftparse/util.py
transpose_list
def transpose_list(list_of_dicts): """Transpose a list of dicts to a dict of lists :param list_of_dicts: to transpose, as in the output from a parse call :return: Dict of lists """ res = {} for d in list_of_dicts: for k, v in d.items(): if k in res: res[k].append(v) else: res[k] = [v] return res
python
def transpose_list(list_of_dicts): """Transpose a list of dicts to a dict of lists :param list_of_dicts: to transpose, as in the output from a parse call :return: Dict of lists """ res = {} for d in list_of_dicts: for k, v in d.items(): if k in res: res[k].append(v) else: res[k] = [v] return res
[ "def", "transpose_list", "(", "list_of_dicts", ")", ":", "res", "=", "{", "}", "for", "d", "in", "list_of_dicts", ":", "for", "k", ",", "v", "in", "d", ".", "items", "(", ")", ":", "if", "k", "in", "res", ":", "res", "[", "k", "]", ".", "append", "(", "v", ")", "else", ":", "res", "[", "k", "]", "=", "[", "v", "]", "return", "res" ]
Transpose a list of dicts to a dict of lists :param list_of_dicts: to transpose, as in the output from a parse call :return: Dict of lists
[ "Transpose", "a", "list", "of", "dicts", "to", "a", "dict", "of", "lists" ]
53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d
https://github.com/CitrineInformatics/dftparse/blob/53a1bf19945cf1c195d6af9beccb3d1b7f4a4c1d/dftparse/util.py#L10-L23
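The same transpose can be written with collections.defaultdict; a sketch plus its behaviour on a small literal input:

from collections import defaultdict

def transpose_list(list_of_dicts):
    res = defaultdict(list)
    for d in list_of_dicts:
        for k, v in d.items():
            res[k].append(v)
    return dict(res)

print(transpose_list([{'x': 1, 'y': 2}, {'x': 3}]))   # {'x': [1, 3], 'y': [2]}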
243,189
JNRowe/jnrbase
jnrbase/debug.py
noisy_wrap
def noisy_wrap(__func: Callable) -> Callable: """Decorator to enable DebugPrint for a given function. Args: __func: Function to wrap Returns: Wrapped function """ # pylint: disable=missing-docstring def wrapper(*args, **kwargs): DebugPrint.enable() try: __func(*args, **kwargs) finally: DebugPrint.disable() return wrapper
python
def noisy_wrap(__func: Callable) -> Callable: """Decorator to enable DebugPrint for a given function. Args: __func: Function to wrap Returns: Wrapped function """ # pylint: disable=missing-docstring def wrapper(*args, **kwargs): DebugPrint.enable() try: __func(*args, **kwargs) finally: DebugPrint.disable() return wrapper
[ "def", "noisy_wrap", "(", "__func", ":", "Callable", ")", "->", "Callable", ":", "# pylint: disable=missing-docstring", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "DebugPrint", ".", "enable", "(", ")", "try", ":", "__func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "finally", ":", "DebugPrint", ".", "disable", "(", ")", "return", "wrapper" ]
Decorator to enable DebugPrint for a given function. Args: __func: Function to wrap Returns: Wrapped function
[ "Decorator", "to", "enable", "DebugPrint", "for", "a", "given", "function", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/debug.py#L74-L90
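A usage sketch: any print inside the wrapped function is routed through DebugPrint (defined in the later records) for the duration of the call. The import path follows this record's path field.

from jnrbase.debug import noisy_wrap

@noisy_wrap
def crunch():
    print('inside crunch')   # annotated with [file:line] by DebugPrint

crunch()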
243,190
JNRowe/jnrbase
jnrbase/debug.py
on_enter
def on_enter(__msg: Optional[Union[Callable, str]] = None) -> Callable: """Decorator to display a message when entering a function. Args: __msg: Message to display Returns: Wrapped function """ # pylint: disable=missing-docstring def decorator(__func): @wraps(__func) def wrapper(*args, **kwargs): if __msg: print(__msg) else: print('Entering {!r}({!r})'.format(__func.__name__, __func)) return __func(*args, **kwargs) return wrapper if callable(__msg): return on_enter()(__msg) return decorator
python
def on_enter(__msg: Optional[Union[Callable, str]] = None) -> Callable: """Decorator to display a message when entering a function. Args: __msg: Message to display Returns: Wrapped function """ # pylint: disable=missing-docstring def decorator(__func): @wraps(__func) def wrapper(*args, **kwargs): if __msg: print(__msg) else: print('Entering {!r}({!r})'.format(__func.__name__, __func)) return __func(*args, **kwargs) return wrapper if callable(__msg): return on_enter()(__msg) return decorator
[ "def", "on_enter", "(", "__msg", ":", "Optional", "[", "Union", "[", "Callable", ",", "str", "]", "]", "=", "None", ")", "->", "Callable", ":", "# pylint: disable=missing-docstring", "def", "decorator", "(", "__func", ")", ":", "@", "wraps", "(", "__func", ")", "def", "wrapper", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "__msg", ":", "print", "(", "__msg", ")", "else", ":", "print", "(", "'Entering {!r}({!r})'", ".", "format", "(", "__func", ".", "__name__", ",", "__func", ")", ")", "return", "__func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "wrapper", "if", "callable", "(", "__msg", ")", ":", "return", "on_enter", "(", ")", "(", "__msg", ")", "return", "decorator" ]
Decorator to display a message when entering a function. Args: __msg: Message to display Returns: Wrapped function
[ "Decorator", "to", "display", "a", "message", "when", "entering", "a", "function", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/debug.py#L93-L114
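Because a callable first argument is detected and re-dispatched, the decorator works both bare and with a custom message; a short usage sketch:

from jnrbase.debug import on_enter

@on_enter
def step_one():
    pass

@on_enter('starting step two')
def step_two():
    pass

step_one()   # prints: Entering 'step_one'(<function ...>)
step_two()   # prints: starting step two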
243,191
JNRowe/jnrbase
jnrbase/debug.py
DebugPrint.write
def write(self, __text: str) -> None: """Write text to the debug stream. Args: __text: Text to write """ if __text == os.linesep: self.handle.write(__text) else: frame = inspect.currentframe() if frame is None: filename = 'unknown' lineno = 0 else: outer = frame.f_back filename = outer.f_code.co_filename.split(os.sep)[-1] lineno = outer.f_lineno self.handle.write('[{:>15s}:{:03d}] {}'.format(filename[-15:], lineno, __text))
python
def write(self, __text: str) -> None: """Write text to the debug stream. Args: __text: Text to write """ if __text == os.linesep: self.handle.write(__text) else: frame = inspect.currentframe() if frame is None: filename = 'unknown' lineno = 0 else: outer = frame.f_back filename = outer.f_code.co_filename.split(os.sep)[-1] lineno = outer.f_lineno self.handle.write('[{:>15s}:{:03d}] {}'.format(filename[-15:], lineno, __text))
[ "def", "write", "(", "self", ",", "__text", ":", "str", ")", "->", "None", ":", "if", "__text", "==", "os", ".", "linesep", ":", "self", ".", "handle", ".", "write", "(", "__text", ")", "else", ":", "frame", "=", "inspect", ".", "currentframe", "(", ")", "if", "frame", "is", "None", ":", "filename", "=", "'unknown'", "lineno", "=", "0", "else", ":", "outer", "=", "frame", ".", "f_back", "filename", "=", "outer", ".", "f_code", ".", "co_filename", ".", "split", "(", "os", ".", "sep", ")", "[", "-", "1", "]", "lineno", "=", "outer", ".", "f_lineno", "self", ".", "handle", ".", "write", "(", "'[{:>15s}:{:03d}] {}'", ".", "format", "(", "filename", "[", "-", "15", ":", "]", ",", "lineno", ",", "__text", ")", ")" ]
Write text to the debug stream.

Args:
    __text: Text to write
[ "Write", "text", "to", "the", "debug", "stream", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/debug.py#L42-L60
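A small sketch of the output this method produces, using io.StringIO as the stream; that DebugPrint's constructor simply stores the stream as `handle` is an assumption, though the enable() record below constructs it the same way:

import io
from jnrbase.debug import DebugPrint

buf = io.StringIO()
dbg = DebugPrint(buf)
dbg.write('inspecting state')
# buf.getvalue() now reads something like '[      my_tool.py:042] inspecting state':
# the 15-character field holds the tail of the calling file's name, and the line
# number of the call site is zero-padded to three digits
print(buf.getvalue())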
243,192
JNRowe/jnrbase
jnrbase/debug.py
DebugPrint.enable
def enable() -> None:
    """Patch ``sys.stdout`` to use ``DebugPrint``."""
    if not isinstance(sys.stdout, DebugPrint):
        sys.stdout = DebugPrint(sys.stdout)
python
def enable() -> None:
    """Patch ``sys.stdout`` to use ``DebugPrint``."""
    if not isinstance(sys.stdout, DebugPrint):
        sys.stdout = DebugPrint(sys.stdout)
[ "def", "enable", "(", ")", "->", "None", ":", "if", "not", "isinstance", "(", "sys", ".", "stdout", ",", "DebugPrint", ")", ":", "sys", ".", "stdout", "=", "DebugPrint", "(", "sys", ".", "stdout", ")" ]
Patch ``sys.stdout`` to use ``DebugPrint``.
[ "Patch", "sys", ".", "stdout", "to", "use", "DebugPrint", "." ]
ae505ef69a9feb739b5f4e62c5a8e6533104d3ea
https://github.com/JNRowe/jnrbase/blob/ae505ef69a9feb739b5f4e62c5a8e6533104d3ea/jnrbase/debug.py#L63-L66
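A usage sketch combining the two DebugPrint records; the undo line relies only on the `handle` attribute seen in write(), and whether the package ships a matching disable helper is not confirmed by these records:

import sys
from jnrbase.debug import DebugPrint

DebugPrint.enable()               # idempotent: the isinstance check makes a second call a no-op
print('checking a value')         # now rendered roughly as '[      script.py:006] checking a value'
sys.stdout = sys.stdout.handle    # crude manual undo via the stored handle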
243,193
toastdriven/alligator
alligator/workers.py
Worker.starting
def starting(self):
    """
    Prints a startup message to stdout.
    """
    ident = self.ident()
    print('{} starting & consuming "{}".'.format(ident, self.to_consume))

    if self.max_tasks:
        print('{} will die after {} tasks.'.format(ident, self.max_tasks))
    else:
        print('{} will never die.'.format(ident))
python
def starting(self):
    """
    Prints a startup message to stdout.
    """
    ident = self.ident()
    print('{} starting & consuming "{}".'.format(ident, self.to_consume))

    if self.max_tasks:
        print('{} will die after {} tasks.'.format(ident, self.max_tasks))
    else:
        print('{} will never die.'.format(ident))
[ "def", "starting", "(", "self", ")", ":", "ident", "=", "self", ".", "ident", "(", ")", "print", "(", "'{} starting & consuming \"{}\".'", ".", "format", "(", "ident", ",", "self", ".", "to_consume", ")", ")", "if", "self", ".", "max_tasks", ":", "print", "(", "'{} will die after {} tasks.'", ".", "format", "(", "ident", ",", "self", ".", "max_tasks", ")", ")", "else", ":", "print", "(", "'{} will never die.'", ".", "format", "(", "ident", ")", ")" ]
Prints a startup message to stdout.
[ "Prints", "a", "startup", "message", "to", "stdout", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/workers.py#L55-L65
243,194
toastdriven/alligator
alligator/workers.py
Worker.interrupt
def interrupt(self):
    """
    Prints an interrupt message to stdout.
    """
    ident = self.ident()
    print('{} for "{}" saw interrupt. Finishing in-progress task.'.format(
        ident, self.to_consume
    ))
python
def interrupt(self):
    """
    Prints an interrupt message to stdout.
    """
    ident = self.ident()
    print('{} for "{}" saw interrupt. Finishing in-progress task.'.format(
        ident, self.to_consume
    ))
[ "def", "interrupt", "(", "self", ")", ":", "ident", "=", "self", ".", "ident", "(", ")", "print", "(", "'{} for \"{}\" saw interrupt. Finishing in-progress task.'", ".", "format", "(", "ident", ",", "self", ".", "to_consume", ")", ")" ]
Prints an interrupt message to stdout.
[ "Prints", "an", "interrupt", "message", "to", "stdout", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/workers.py#L67-L75
243,195
toastdriven/alligator
alligator/workers.py
Worker.stopping
def stopping(self):
    """
    Prints a shutdown message to stdout.
    """
    ident = self.ident()
    print('{} for "{}" shutting down. Consumed {} tasks.'.format(
        ident, self.to_consume, self.tasks_complete
    ))
python
def stopping(self):
    """
    Prints a shutdown message to stdout.
    """
    ident = self.ident()
    print('{} for "{}" shutting down. Consumed {} tasks.'.format(
        ident, self.to_consume, self.tasks_complete
    ))
[ "def", "stopping", "(", "self", ")", ":", "ident", "=", "self", ".", "ident", "(", ")", "print", "(", "'{} for \"{}\" shutting down. Consumed {} tasks.'", ".", "format", "(", "ident", ",", "self", ".", "to_consume", ",", "self", ".", "tasks_complete", ")", ")" ]
Prints a shutdown message to stdout.
[ "Prints", "a", "shutdown", "message", "to", "stdout", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/workers.py#L77-L86
243,196
toastdriven/alligator
alligator/workers.py
Worker.run_forever
def run_forever(self):
    """
    Causes the worker to run either forever or until the
    ``Worker.max_tasks`` are reached.
    """
    self.starting()
    self.keep_running = True

    def handle(signum, frame):
        self.interrupt()
        self.keep_running = False

    signal.signal(signal.SIGINT, handle)

    while self.keep_running:
        if self.max_tasks and self.tasks_complete >= self.max_tasks:
            self.stopping()

        if self.gator.len():
            result = self.gator.pop()
            self.tasks_complete += 1
            self.result(result)

        if self.nap_time >= 0:
            time.sleep(self.nap_time)

    return 0
python
def run_forever(self):
    """
    Causes the worker to run either forever or until the
    ``Worker.max_tasks`` are reached.
    """
    self.starting()
    self.keep_running = True

    def handle(signum, frame):
        self.interrupt()
        self.keep_running = False

    signal.signal(signal.SIGINT, handle)

    while self.keep_running:
        if self.max_tasks and self.tasks_complete >= self.max_tasks:
            self.stopping()

        if self.gator.len():
            result = self.gator.pop()
            self.tasks_complete += 1
            self.result(result)

        if self.nap_time >= 0:
            time.sleep(self.nap_time)

    return 0
[ "def", "run_forever", "(", "self", ")", ":", "self", ".", "starting", "(", ")", "self", ".", "keep_running", "=", "True", "def", "handle", "(", "signum", ",", "frame", ")", ":", "self", ".", "interrupt", "(", ")", "self", ".", "keep_running", "=", "False", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "handle", ")", "while", "self", ".", "keep_running", ":", "if", "self", ".", "max_tasks", "and", "self", ".", "tasks_complete", ">=", "self", ".", "max_tasks", ":", "self", ".", "stopping", "(", ")", "if", "self", ".", "gator", ".", "len", "(", ")", ":", "result", "=", "self", ".", "gator", ".", "pop", "(", ")", "self", ".", "tasks_complete", "+=", "1", "self", ".", "result", "(", "result", ")", "if", "self", ".", "nap_time", ">=", "0", ":", "time", ".", "sleep", "(", "self", ".", "nap_time", ")", "return", "0" ]
Causes the worker to run either forever or until the ``Worker.max_tasks`` are reached.
[ "Causes", "the", "worker", "to", "run", "either", "forever", "or", "until", "the", "Worker", ".", "max_tasks", "are", "reached", "." ]
f18bcb35b350fc6b0886393f5246d69c892b36c7
https://github.com/toastdriven/alligator/blob/f18bcb35b350fc6b0886393f5246d69c892b36c7/alligator/workers.py#L96-L122
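A hedged sketch of how the worker methods above fit together; the backend URL and the Worker constructor arguments are assumptions, since the records only show the attributes the methods read (gator, to_consume, max_tasks, nap_time):

from alligator import Gator
from alligator.workers import Worker

gator = Gator('locmem://')        # backend DSN is illustrative
worker = Worker(gator)            # constructor signature assumed from the attributes used above
worker.run_forever()              # prints the starting() banner, loops popping queued tasks,
                                  # and returns 0 after SIGINT triggers interrupt()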
243,197
naphatkrit/temp-utils
temp_utils/contextmanagers.py
chdir
def chdir(path):
    """Change the working directory to `path` for the duration of this
    context manager.

    :param str path: The path to change to
    """
    cur_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(cur_cwd)
python
def chdir(path):
    """Change the working directory to `path` for the duration of this
    context manager.

    :param str path: The path to change to
    """
    cur_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(cur_cwd)
[ "def", "chdir", "(", "path", ")", ":", "cur_cwd", "=", "os", ".", "getcwd", "(", ")", "os", ".", "chdir", "(", "path", ")", "try", ":", "yield", "finally", ":", "os", ".", "chdir", "(", "cur_cwd", ")" ]
Change the working directory to `path` for the duration of this context manager.

:param str path: The path to change to
[ "Change", "the", "working", "directory", "to", "path", "for", "the", "duration", "of", "this", "context", "manager", "." ]
4b0cb5a76fcaa9f3b5db05ed1f5f7a1979a86d2c
https://github.com/naphatkrit/temp-utils/blob/4b0cb5a76fcaa9f3b5db05ed1f5f7a1979a86d2c/temp_utils/contextmanagers.py#L9-L20
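A brief usage sketch; the record shows a bare generator function, so this assumes the module wraps chdir with contextlib.contextmanager (the decorator line is not part of the extracted string):

import os
from temp_utils.contextmanagers import chdir

print(os.getcwd())           # original working directory
with chdir('/tmp'):          # any existing directory works; /tmp is illustrative
    print(os.getcwd())       # now /tmp
print(os.getcwd())           # restored afterwards, even on error, thanks to the finally block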
243,198
naphatkrit/temp-utils
temp_utils/contextmanagers.py
temp_file
def temp_file():
    """Create a temporary file for the duration of this context manager,
    deleting it afterwards.

    Yields:
        str - path to the file
    """
    fd, path = tempfile.mkstemp()
    os.close(fd)
    try:
        yield path
    finally:
        os.remove(path)
python
def temp_file():
    """Create a temporary file for the duration of this context manager,
    deleting it afterwards.

    Yields:
        str - path to the file
    """
    fd, path = tempfile.mkstemp()
    os.close(fd)
    try:
        yield path
    finally:
        os.remove(path)
[ "def", "temp_file", "(", ")", ":", "fd", ",", "path", "=", "tempfile", ".", "mkstemp", "(", ")", "os", ".", "close", "(", "fd", ")", "try", ":", "yield", "path", "finally", ":", "os", ".", "remove", "(", "path", ")" ]
Create a temporary file for the duration of this context manager, deleting it afterwards.

Yields:
    str - path to the file
[ "Create", "a", "temporary", "file", "for", "the", "duration", "of", "this", "context", "manager", "deleting", "it", "afterwards", "." ]
4b0cb5a76fcaa9f3b5db05ed1f5f7a1979a86d2c
https://github.com/naphatkrit/temp-utils/blob/4b0cb5a76fcaa9f3b5db05ed1f5f7a1979a86d2c/temp_utils/contextmanagers.py#L39-L51
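The companion usage sketch, under the same assumption that contextlib.contextmanager is applied in the module:

from temp_utils.contextmanagers import temp_file

with temp_file() as path:
    with open(path, 'w') as fh:     # mkstemp's descriptor was already closed, so reopening is fine
        fh.write('scratch data')
# on leaving the block the file has been deleted by os.remove(path)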
243,199
maxfischer2781/chainlet
docs/conf.py
skip_pickle_inject
def skip_pickle_inject(app, what, name, obj, skip, options):
    """skip global wrapper._raw_slave names used only for pickle support"""
    if name.endswith('._raw_slave'):
        return True
    return None
python
def skip_pickle_inject(app, what, name, obj, skip, options):
    """skip global wrapper._raw_slave names used only for pickle support"""
    if name.endswith('._raw_slave'):
        return True
    return None
[ "def", "skip_pickle_inject", "(", "app", ",", "what", ",", "name", ",", "obj", ",", "skip", ",", "options", ")", ":", "if", "name", ".", "endswith", "(", "'._raw_slave'", ")", ":", "return", "True", "return", "None" ]
skip global wrapper._raw_slave names used only for pickle support
[ "skip", "global", "wrapper", ".", "_raw_slave", "names", "used", "only", "for", "pickle", "support" ]
4e17f9992b4780bd0d9309202e2847df640bffe8
https://github.com/maxfischer2781/chainlet/blob/4e17f9992b4780bd0d9309202e2847df640bffe8/docs/conf.py#L170-L174
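For context, a handler with this (app, what, name, obj, skip, options) signature is normally registered against Sphinx's autodoc-skip-member event from conf.py; whether chainlet's conf.py wires it exactly like this is an assumption:

def setup(app):
    # returning True forces autodoc to skip the member; returning None
    # leaves the decision to autodoc's default behaviour
    app.connect('autodoc-skip-member', skip_pickle_inject)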