idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
3,300
def set_until(self, frame, lineno=None):
    """Stop when the current line in `frame` passes `lineno`, or on return.

    When `lineno` is omitted, the stop line defaults to the line after
    the frame's current one.
    """
    target = frame.f_lineno + 1 if lineno is None else lineno
    self._set_stopinfo(frame, target)
Stop when the current line number in frame is greater than lineno or when returning from frame .
3,301
def set_trace ( self , frame = None ) : self . settrace ( False ) if not frame : frame = sys . _getframe ( ) . f_back frame . f_trace = self . trace_dispatch self . reset ( ignore_first_call_event = False , botframe = self . botframe ) self . topframe = frame while frame : if frame is self . botframe : break botframe =...
Start debugging from frame .
3,302
def listdir(directory):
    """Return the entry names of a local directory.

    Names of nested directories are suffixed with the OS path separator
    so callers can distinguish them from plain files.

    :param directory: path of the local directory to list.
    :return: list of entry names; directory names end with os.path.sep.
    """
    file_names = list()
    for filename in os.listdir(directory):
        file_path = os.path.join(directory, filename)
        if os.path.isdir(file_path):
            # Bug fix: the entry's own name was being replaced by a literal
            # placeholder string; keep the name and append the separator.
            filename = f'{filename}{os.path.sep}'
        file_names.append(filename)
    return file_names
Returns list of nested files and directories for local directory by path
3,303
def get_options ( option_type , from_options ) : _options = dict ( ) for key in option_type . keys : key_with_prefix = f'{option_type.prefix}{key}' if key not in from_options and key_with_prefix not in from_options : _options [ key ] = '' elif key in from_options : _options [ key ] = from_options . get ( key ) else : _...
Extract options for specified option type from all options
3,304
def get_headers ( self , action , headers_ext = None ) : if action in Client . http_header : try : headers = Client . http_header [ action ] . copy ( ) except AttributeError : headers = Client . http_header [ action ] [ : ] else : headers = list ( ) if headers_ext : headers . extend ( headers_ext ) if self . webdav . t...
Returns HTTP headers of specified WebDAV actions .
3,305
def execute_request ( self , action , path , data = None , headers_ext = None ) : response = self . session . request ( method = Client . requests [ action ] , url = self . get_url ( path ) , auth = self . webdav . auth , headers = self . get_headers ( action , headers_ext ) , timeout = self . timeout , data = data ) i...
Generate request to WebDAV server for specified action and path and execute it .
3,306
def valid(self):
    """Validate the combined WebDAV and proxy settings.

    :return: True when both sub-configurations validate, False otherwise.
    """
    # `True if x else False` is a redundant ternary; bool() keeps the
    # strict True/False return contract of the original.
    return bool(self.webdav.valid() and self.proxy.valid())
Validates the WebDAV and proxy settings.
3,307
def download_from ( self , buff , remote_path ) : urn = Urn ( remote_path ) if self . is_dir ( urn . path ( ) ) : raise OptionNotValid ( name = 'remote_path' , value = remote_path ) if not self . check ( urn . path ( ) ) : raise RemoteResourceNotFound ( urn . path ( ) ) response = self . execute_request ( action = 'dow...
Downloads file from WebDAV and writes it in buffer .
3,308
def download_directory ( self , remote_path , local_path , progress = None ) : urn = Urn ( remote_path , directory = True ) if not self . is_dir ( urn . path ( ) ) : raise OptionNotValid ( name = 'remote_path' , value = remote_path ) if os . path . exists ( local_path ) : shutil . rmtree ( local_path ) os . makedirs ( ...
Downloads directory and downloads all nested files and directories from remote WebDAV to local . If there is something on local path it deletes directories and files then creates new .
3,309
def download_sync(self, remote_path, local_path, callback=None):
    """Download a remote resource from the WebDAV server synchronously.

    :param remote_path: path of the resource on the WebDAV server.
    :param local_path: local path the resource is written to.
    :param callback: optional callable invoked once the download is done.
    """
    self.download(remote_path=remote_path, local_path=local_path)
    if callback:
        callback()
Downloads remote resources from WebDAV server synchronously .
3,310
def download_async(self, remote_path, local_path, callback=None):
    """Download a remote resource from the WebDAV server in a background thread.

    Delegates to `download_sync` inside a freshly started thread; the
    thread handle is not retained.
    """
    def worker():
        self.download_sync(remote_path=remote_path,
                           local_path=local_path,
                           callback=callback)

    threading.Thread(target=worker).start()
Downloads remote resources from WebDAV server asynchronously
3,311
def upload_directory ( self , remote_path , local_path , progress = None ) : urn = Urn ( remote_path , directory = True ) if not urn . is_dir ( ) : raise OptionNotValid ( name = 'remote_path' , value = remote_path ) if not os . path . isdir ( local_path ) : raise OptionNotValid ( name = 'local_path' , value = local_pat...
Uploads directory to remote path on WebDAV server . In case directory is exist on remote server it will delete it and then upload directory with nested files and directories .
3,312
def upload_sync(self, remote_path, local_path, callback=None):
    """Upload a resource to the WebDAV server synchronously.

    :param remote_path: destination path on the WebDAV server.
    :param local_path: local path of the resource to upload.
    :param callback: optional callable invoked once the upload is done.
    """
    self.upload(remote_path=remote_path, local_path=local_path)
    if callback:
        callback()
Uploads resource to remote path on WebDAV server synchronously . In case resource is directory it will upload all nested files and directories .
3,313
def upload_async(self, remote_path, local_path, callback=None):
    """Upload a resource to the WebDAV server in a background thread.

    Delegates to `upload_sync` inside a freshly started thread; the
    thread handle is not retained.
    """
    def worker():
        self.upload_sync(remote_path=remote_path,
                         local_path=local_path,
                         callback=callback)

    threading.Thread(target=worker).start()
Uploads resource to remote path on WebDAV server asynchronously . In case resource is directory it will upload all nested files and directories .
3,314
def parse_get_list_response(content):
    """Extract resource URNs from a WebDAV listing response body.

    :param content: raw XML response from the server.
    :return: list of Urn objects; empty list when the XML cannot be parsed.
    """
    try:
        tree = etree.fromstring(content)
        paths = (Urn.separate + unquote(urlsplit(node.text).path)
                 for node in tree.findall('.//{DAV:}href'))
        return [Urn(path) for path in paths]
    except etree.XMLSyntaxError:
        return list()
Parses the response content XML from the WebDAV server and extracts file and directory names.
3,315
def create_free_space_request_content ( ) : root = etree . Element ( 'propfind' , xmlns = 'DAV:' ) prop = etree . SubElement ( root , 'prop' ) etree . SubElement ( prop , 'quota-available-bytes' ) etree . SubElement ( prop , 'quota-used-bytes' ) tree = etree . ElementTree ( root ) return WebDavXmlUtils . etree_to_strin...
Creates an XML for requesting of free space on remote WebDAV server .
3,316
def parse_free_space_response ( content , hostname ) : try : tree = etree . fromstring ( content ) node = tree . find ( './/{DAV:}quota-available-bytes' ) if node is not None : return int ( node . text ) else : raise MethodNotSupported ( name = 'free' , server = hostname ) except TypeError : raise MethodNotSupported ( ...
Parses of response content XML from WebDAV server and extract an amount of free space .
3,317
def create_get_property_request_content ( option ) : root = etree . Element ( 'propfind' , xmlns = 'DAV:' ) prop = etree . SubElement ( root , 'prop' ) etree . SubElement ( prop , option . get ( 'name' , '' ) , xmlns = option . get ( 'namespace' , '' ) ) tree = etree . ElementTree ( root ) return WebDavXmlUtils . etree...
Creates an XML for requesting of getting a property value of remote WebDAV resource .
3,318
def parse_get_property_response(content, name):
    """Pull a single metadata property value out of a PROPFIND response.

    :param content: raw XML response body.
    :param name: local name of the property element to read.
    :return: text of the first matching element.
    """
    document = etree.fromstring(content)
    matches = document.xpath('//*[local-name() = $name]', name=name)
    return matches[0].text
Parses of response content XML from WebDAV server for getting metadata property value for some resource .
3,319
def create_set_property_batch_request_content ( options ) : root_node = etree . Element ( 'propertyupdate' , xmlns = 'DAV:' ) set_node = etree . SubElement ( root_node , 'set' ) prop_node = etree . SubElement ( set_node , 'prop' ) for option in options : opt_node = etree . SubElement ( prop_node , option [ 'name' ] , x...
Creates an XML for requesting of setting a property values for remote WebDAV resource in batch .
3,320
def etree_to_string(tree):
    """Serialize an ElementTree to bytes with an XML declaration.

    :param tree: the ElementTree to serialize.
    :return: UTF-8 encoded bytes of the serialized document.
    """
    sink = BytesIO()
    tree.write(sink, xml_declaration=True, encoding='UTF-8')
    return sink.getvalue()
Creates string from lxml . etree . ElementTree with XML declaration and UTF - 8 encoding .
3,321
def extract_response_for_path ( content , path , hostname ) : try : tree = etree . fromstring ( content ) responses = tree . findall ( '{DAV:}response' ) n_path = Urn . normalize_path ( path ) for resp in responses : href = resp . findtext ( '{DAV:}href' ) if Urn . compare_path ( n_path , href ) is True : return resp r...
Extracts single response for specified remote resource .
3,322
def cleanup ( config_dir ) : stdout_path = os . path . join ( config_dir , 'pueue.stdout' ) stderr_path = os . path . join ( config_dir , 'pueue.stderr' ) if os . _exists ( stdout_path ) : os . remove ( stdout_path ) if os . _exists ( stderr_path ) : os . remove ( stderr_path ) socketPath = os . path . join ( config_di...
Remove temporary stderr and stdout files as well as the daemon socket .
3,323
def get_descriptor_output ( descriptor , key , handler = None ) : line = 'stub' lines = '' while line != '' : try : line = descriptor . readline ( ) lines += line except UnicodeDecodeError : error_msg = "Error while decoding output of process {}" . format ( key ) if handler : handler . logger . error ( "{} with command...
Get the descriptor output and handle incorrect UTF - 8 encoding of subprocess logs .
3,324
def request ( self , hash_ , quickkey , doc_type , page = None , output = None , size_id = None , metadata = None , request_conversion_only = None ) : if len ( hash_ ) > 4 : hash_ = hash_ [ : 4 ] query = QueryParams ( { 'quickkey' : quickkey , 'doc_type' : doc_type , 'page' : page , 'output' : output , 'size_id' : size...
Query conversion server
3,325
def bp_commands ( self , frame , breakpoint_hits ) : effective_bp_list , temporaries = breakpoint_hits silent = True doprompt = False atleast_one_cmd = False for bp in effective_bp_list : if bp in self . commands : if not atleast_one_cmd : atleast_one_cmd = True self . setup ( frame , None ) lastcmd_back = self . lastc...
Call every command that was set for the current active breakpoints .
3,326
def precmd ( self , line ) : if not line . strip ( ) : return line args = line . split ( ) while args [ 0 ] in self . aliases : line = self . aliases [ args [ 0 ] ] ii = 1 for tmpArg in args [ 1 : ] : line = line . replace ( "%" + str ( ii ) , tmpArg ) ii += 1 line = line . replace ( "%*" , ' ' . join ( args [ 1 : ] ) ...
Handle alias expansion and ;; separator .
3,327
def onecmd(self, line):
    """Interpret `line` as though it had been typed at the prompt.

    While a breakpoint command list is being defined, the line is routed
    to the definition handler instead of normal Cmd dispatch.
    """
    if self.commands_defining:
        return self.handle_command_def(line)
    return cmd.Cmd.onecmd(self, line)
Interpret the argument as though it had been typed in response to the prompt .
3,328
def handle_command_def ( self , line ) : cmd , arg , line = self . parseline ( line ) if not cmd : return if cmd == 'silent' : self . commands_silent [ self . commands_bnum ] = True return elif cmd == 'end' : self . cmdqueue = [ ] return 1 cmdlist = self . commands [ self . commands_bnum ] if arg : cmdlist . append ( c...
Handles one command line during command list definition .
3,329
def defaultFile(self):
    """Produce a reasonable default file name for the current frame.

    Substitutes the main script's name when the frame comes from an
    exec'd string.
    """
    name = self.curframe.f_code.co_filename
    if name == '<string>' and self.mainpyfile:
        return self.mainpyfile
    return name
Produce a reasonable default .
3,330
def do_retval(self, arg):
    """retval
    Print the return value for the last return of a function.
    """
    frame_locals = self.get_locals(self.curframe)
    if '__return__' not in frame_locals:
        self.error('Not yet returned!')
    else:
        self.message(bdb.safe_repr(frame_locals['__return__']))
retval Print the return value for the last return of a function .
3,331
def do_p(self, arg):
    """p expression
    Print the value of the expression.
    """
    try:
        value = self._getval(arg)
        self.message(bdb.safe_repr(value))
    except Exception:
        # NOTE(review): errors are deliberately swallowed here — presumably
        # _getval reports its own failures; confirm against the caller.
        pass
p expression Print the value of the expression .
3,332
def do_pp(self, arg):
    """pp expression
    Pretty-print the value of the expression, falling back to a safe
    repr when the object's own repr() raises.
    """
    value = self._getval(arg)
    try:
        repr(value)
    except Exception:
        self.message(bdb.safe_repr(value))
    else:
        self.message(pprint.pformat(value))
pp expression Pretty - print the value of the expression .
3,333
def do_longlist ( self , arg ) : filename = self . curframe . f_code . co_filename breaklist = self . get_file_breaks ( filename ) try : lines , lineno = getsourcelines ( self . curframe , self . get_locals ( self . curframe ) ) except IOError as err : self . error ( err ) return self . _print_lines ( lines , lineno , ...
longlist | ll List the whole source code for the current function or frame .
3,334
def do_source(self, arg):
    """source expression
    Try to get source code for the given object and display it.
    """
    try:
        target = self._getval(arg)
    except Exception:
        # _getval failed; nothing to show.
        return
    try:
        lines, first = getsourcelines(target, self.get_locals(self.curframe))
    except (IOError, TypeError) as err:
        self.error(err)
        return
    return self._print_lines(lines, first)
source expression Try to get source code for the given object and display it .
3,335
def _print_lines ( self , lines , start , breaks = ( ) , frame = None ) : if frame : current_lineno = frame . f_lineno exc_lineno = self . tb_lineno . get ( frame , - 1 ) else : current_lineno = exc_lineno = - 1 for lineno , line in enumerate ( lines , start ) : s = str ( lineno ) . rjust ( 3 ) if len ( s ) < 4 : s += ...
Print a range of lines .
3,336
def do_whatis ( self , arg ) : try : value = self . _getval ( arg ) except Exception : return code = None try : code = value . __code__ except Exception : pass if code : self . message ( 'Function %s' % code . co_name ) return try : code = value . __func__ . __code__ except Exception : pass if code : self . message ( '...
whatis arg Print the type of the argument .
3,337
def do_unalias(self, arg):
    """unalias name
    Delete the specified alias; unknown names are ignored.
    """
    parts = arg.split()
    if not parts:
        return
    # pop with a default matches the original membership-checked delete.
    self.aliases.pop(parts[0], None)
unalias name Delete the specified alias .
3,338
def read(self, limit=-1):
    """Read up to `limit` bytes from the wrapped file region. See file.read.

    A negative limit, or one past the end of the region, reads the
    remainder of the region only.
    """
    remaining = self.len - self.parent_fd.tell() + self.offset
    if limit == -1 or limit > remaining:
        limit = remaining
    return self.parent_fd.read(limit)
Read content . See file . read
3,339
def seek ( self , offset , whence = os . SEEK_SET ) : pos = None if whence == os . SEEK_SET : pos = self . offset + offset elif whence == os . SEEK_CUR : pos = self . tell ( ) + offset elif whence == os . SEEK_END : pos = self . offset + self . len + offset else : raise ValueError ( "invalid whence {}" . format ( whenc...
Seek to position in stream see file . seek
3,340
def close(self):
    """Close the underlying file, unless it has no real descriptor.

    Objects without fileno() (e.g. in-memory buffers) are assumed to be
    shared/reused and are left open.
    """
    try:
        self.parent_fd.fileno()
    except io.UnsupportedOperation:
        logger.debug("Not closing parent_fd - reusing existing")
    else:
        self.parent_fd.close()
Close file see file . close
3,341
def _build_query ( self , uri , params = None , action_token_type = None ) : if params is None : params = QueryParams ( ) params [ 'response_format' ] = 'json' session_token = None if action_token_type in self . _action_tokens : using_action_token = True session_token = self . _action_tokens [ action_token_type ] else ...
Prepare query string
3,342
def request ( self , action , params = None , action_token_type = None , upload_info = None , headers = None ) : uri = self . _build_uri ( action ) if isinstance ( params , six . text_type ) : query = params else : query = self . _build_query ( uri , params , action_token_type ) if headers is None : headers = { } if up...
Perform request to MediaFire API
3,343
def _regenerate_secret_key ( self ) : if self . _session and 'secret_key' in self . _session : self . _session [ 'secret_key' ] = ( int ( self . _session [ 'secret_key' ] ) * 16807 ) % 2147483647
Regenerate secret key
3,344
def session ( self , value ) : if value is None : self . _session = None return if not isinstance ( value , dict ) : raise ValueError ( "session info is required" ) session_parsed = { } for key in [ "session_token" , "time" , "secret_key" ] : if key not in value : raise ValueError ( "Missing parameter: {}" . format ( k...
Set session token
3,345
def set_action_token(self, type_=None, action_token=None):
    """Store an action token by type, or discard it when token is None.

    Discarding a type that was never stored raises KeyError, matching
    the underlying dict's behavior.
    """
    if action_token is not None:
        self._action_tokens[type_] = action_token
    else:
        del self._action_tokens[type_]
Set action tokens
3,346
def _reset ( self ) : self . _socket = None self . _pending = deque ( ) self . _out_buffer = '' self . _buffer = '' self . _identify_response = { } self . last_ready_sent = 0 self . ready = 0
Reset all of our stateful variables
3,347
def connect ( self , force = False ) : if not force and self . alive ( ) : return True self . _reset ( ) with self . _socket_lock : try : logger . info ( 'Creating socket...' ) self . _socket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) self . _socket . settimeout ( self . _timeout ) logger . info ( 'C...
Establish a connection
3,348
def close(self):
    """Flush what we can, close the socket, and reset all state."""
    try:
        while self.pending():
            self.flush()
    except socket.error:
        # Best effort only -- the peer may already be gone.
        pass
    with self._socket_lock:
        try:
            if self._socket:
                self._socket.close()
        finally:
            self._reset()
Close our connection
3,349
def socket(self, blocking=True):
    """Yield the raw socket while holding the socket lock.

    When a non-blocking acquire fails, nothing is yielded at all.
    """
    acquired = self._socket_lock.acquire(blocking)
    if acquired:
        try:
            yield self._socket
        finally:
            self._socket_lock.release()
Blockingly yield the socket
3,350
def identified ( self , res ) : try : res . data = json . loads ( res . data ) self . _identify_response = res . data logger . info ( 'Got identify response: %s' , res . data ) except : logger . warn ( 'Server does not support feature negotiation' ) self . _identify_response = { } self . max_rdy_count = self . _identif...
Handle a response to our identify command . Returns response
3,351
def setblocking(self, blocking):
    """Set blocking mode on the underlying socket and remember it."""
    for raw_socket in self.socket():
        raw_socket.setblocking(blocking)
    self._blocking = blocking
Set whether or not this connection is blocking
3,352
def flush ( self ) : total = 0 for sock in self . socket ( blocking = False ) : pending = self . _pending data = self . _out_buffer or '' . join ( pending . popleft ( ) for _ in xrange ( len ( pending ) ) ) try : total = sock . send ( data ) except socket . error as exc : if exc . args [ 0 ] not in self . WOULD_BLOCK_E...
Flush some of the waiting messages returns count written
3,353
def send(self, command, message=None):
    """Send a command frame, packing the optional message with its length.

    In blocking mode the frame is written out immediately; otherwise it
    is queued for a later flush().
    """
    frame = command + constants.NL
    if message:
        frame += util.pack(message)
    if self._blocking:
        for raw_socket in self.socket():
            raw_socket.sendall(frame)
    else:
        self._pending.append(frame)
Send a command over the socket with length encoded
3,354
def identify(self, data):
    """Send an IDENTIFY command carrying `data` serialized as JSON."""
    payload = json.dumps(data)
    return self.send(constants.IDENTIFY, payload)
Send an identification message
3,355
def pub(self, topic, message):
    """Publish a single message to `topic`."""
    command = ' '.join((constants.PUB, topic))
    return self.send(command, message)
Publish to a topic
3,356
def mpub(self, topic, *messages):
    """Publish several messages to `topic` in a single frame."""
    command = constants.MPUB + ' ' + topic
    return self.send(command, messages)
Publish multiple messages to a topic
3,357
def rdy(self, count):
    """Tell the server how many messages we are ready to receive."""
    self.ready = count
    self.last_ready_sent = count
    command = constants.RDY + ' ' + str(count)
    return self.send(command)
Indicate that you're ready to receive
3,358
def _read ( self , limit = 1000 ) : for sock in self . socket ( ) : if sock is None : return [ ] try : packet = sock . recv ( 4096 ) except socket . timeout : return [ ] except socket . error as exc : if exc . args [ 0 ] in self . WOULD_BLOCK_ERRS : return [ ] else : raise self . _buffer += packet responses = [ ] total...
Return all the responses read
3,359
def read(self):
    """Read responses from the socket, decrementing the ready count once
    per message frame received.

    :return: list of responses read.
    """
    responses = self._read()
    message_frames = sum(
        1 for r in responses if r.frame_type == Message.FRAME_TYPE)
    self.ready -= message_frames
    return responses
Responses from an established socket
3,360
def discover ( self , topic ) : logger . info ( 'Discovering on topic %s' , topic ) producers = [ ] for lookupd in self . _lookupd : logger . info ( 'Discovering on %s' , lookupd ) try : for producer in lookupd . lookup ( topic ) [ 'producers' ] : logger . info ( 'Found producer %s on %s' , producer , lookupd ) produce...
Run the discovery mechanism
3,361
def check_connections ( self ) : logger . info ( 'Checking connections' ) if self . _lookupd : self . discover ( self . _topic ) for hostspec in self . _nsqd_tcp_addresses : logger . debug ( 'Checking nsqd instance %s' , hostspec ) host , port = hostspec . split ( ':' ) port = int ( port ) conn = self . _connections . ...
Connect to all the appropriate instances
3,362
def connection_checker(self):
    """Run a ConnectionChecker thread for the duration of the context.

    The checker is started on entry and stopped and joined on exit.
    """
    checker = ConnectionChecker(self)
    logger.info('Starting connection-checker thread')
    checker.start()
    try:
        yield checker
    finally:
        logger.info('Stopping connection-checker')
        checker.stop()
        logger.info('Joining connection-checker')
        checker.join()
Run periodic reconnection checks
3,363
# Open a Connection to host:port using this client's reconnection backoff,
# auth secret, connect timeout and identify options; a live connection is
# switched to non-blocking mode and registered via self.add().
# NOTE(review): the original indentation was lost when this line was
# flattened -- whether `return conn` sits inside the `if conn.alive()`
# branch cannot be determined from this view; confirm against upstream
# before reformatting.
def connect ( self , host , port ) : conn = connection . Connection ( host , port , reconnection_backoff = self . _reconnection_backoff , auth_secret = self . _auth_secret , timeout = self . _connect_timeout , ** self . _identify_options ) if conn . alive ( ) : conn . setblocking ( 0 ) self . add ( conn ) return conn
Connect to the provided host port
3,364
def add(self, connection):
    """Register a connection, keyed by its (host, port) pair.

    :return: the connection when newly added, None when already tracked.
    """
    key = (connection.host, connection.port)
    with self._lock:
        if key in self._connections:
            return None
        self._connections[key] = connection
        self.added(connection)
        return connection
Add a connection
3,365
def remove(self, connection):
    """Forget a connection and close it on a best-effort basis.

    :return: the connection that was removed, or None if untracked.
    """
    key = (connection.host, connection.port)
    with self._lock:
        found = self._connections.pop(key, None)
        try:
            self.close_connection(found)
        except Exception as exc:
            logger.warn('Failed to close %s: %s', connection, exc)
        return found
Remove a connection
3,366
def read ( self ) : connections = [ c for c in self . connections ( ) if c . alive ( ) ] if not connections : time . sleep ( self . _timeout ) return [ ] writes = [ c for c in connections if c . pending ( ) ] try : readable , writable , exceptable = select . select ( connections , writes , connections , self . _timeout...
Read from any of the connections that need it
3,367
def random_connection(self):
    """Yield one randomly chosen living connection."""
    living = [conn for conn in self.connections() if conn.alive()]
    yield random.choice(living)
Pick a random living connection
3,368
def wait_response(self):
    """Poll read() until at least one response arrives and return it."""
    while True:
        responses = self.read()
        if responses:
            return responses
Wait for a response
3,369
# Publish `message` to `topic` on a randomly chosen connection, then wait
# for and return the server's response.
# NOTE(review): flattening lost the indentation -- whether
# `return self.wait_response()` sits inside the `with` block (i.e. while
# still holding the connection) is not determinable here; confirm before
# reformatting, as the placement is behaviorally relevant.
def pub ( self , topic , message ) : with self . random_connection ( ) as client : client . pub ( topic , message ) return self . wait_response ( )
Publish the provided message to the provided topic
3,370
# Publish several messages to `topic` on a randomly chosen connection,
# then wait for and return the server's response.
# NOTE(review): flattening lost the indentation -- whether
# `return self.wait_response()` sits inside the `with` block is not
# determinable here; confirm before reformatting, as the placement is
# behaviorally relevant.
def mpub ( self , topic , * messages ) : with self . random_connection ( ) as client : client . mpub ( topic , * messages ) return self . wait_response ( )
Publish messages to a topic
3,371
def create_socket ( self ) : socket_path = os . path . join ( self . config_dir , 'pueue.sock' ) try : if os . path . exists ( socket_path ) : os . remove ( socket_path ) self . socket = socket . socket ( socket . AF_UNIX , socket . SOCK_STREAM ) self . socket . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR ...
Create a socket for the daemon depending on the directory location .
3,372
def initialize_directories(self, root_dir):
    """Ensure the pueue config directory exists under `root_dir`.

    Falls back to the user's home directory when `root_dir` is falsy.
    """
    base = root_dir if root_dir else os.path.expanduser('~')
    self.config_dir = os.path.join(base, '.config/pueue')
    if not os.path.exists(self.config_dir):
        os.makedirs(self.config_dir)
Create all directories needed for logs and configs .
3,373
def respond_client(self, answer, socket):
    """Pickle `answer`, send it to the client socket, and hang up.

    The socket is also dropped from the select read list.
    """
    payload = pickle.dumps(answer, -1)
    socket.sendall(payload)
    self.read_list.remove(socket)
    socket.close()
Send an answer to the client .
3,374
def read_config ( self ) : config_file = os . path . join ( self . config_dir , 'pueue.ini' ) self . config = configparser . ConfigParser ( ) if os . path . exists ( config_file ) : try : self . config . read ( config_file ) return except Exception : self . logger . error ( 'Error while parsing config file. Deleting ol...
Read a previous configuration file or create a new with default values .
3,375
def write_config(self):
    """Persist the current configuration to pueue.ini in the config dir."""
    path = os.path.join(self.config_dir, 'pueue.ini')
    with open(path, 'w') as handle:
        self.config.write(handle)
Write the current configuration to the config file .
3,376
def stop_daemon(self, payload=None):
    """Kill every running process with SIGKILL and flag the daemon to stop.

    :return: status dict announcing the shutdown.
    """
    self.process_handler.kill_all(signals['9'], True)
    self.running = False
    return {'message': 'Pueue daemon shutting down',
            'status': 'success'}
Kill current processes and initiate daemon shutdown .
3,377
def set_config ( self , payload ) : self . config [ 'default' ] [ payload [ 'option' ] ] = str ( payload [ 'value' ] ) if payload [ 'option' ] == 'maxProcesses' : self . process_handler . set_max ( payload [ 'value' ] ) if payload [ 'option' ] == 'customShell' : path = payload [ 'value' ] if os . path . isfile ( path )...
Update the current config depending on the payload and save it .
3,378
def pipe_to_process ( self , payload ) : message = payload [ 'input' ] key = payload [ 'key' ] if not self . process_handler . is_running ( key ) : return { 'message' : 'No running process for this key' , 'status' : 'error' } self . process_handler . send_to_process ( message , key ) return { 'message' : 'Message sent'...
Send something to stdin of a specific process .
3,379
def send_status ( self , payload ) : answer = { } data = [ ] if self . paused : answer [ 'status' ] = 'paused' else : answer [ 'status' ] = 'running' if len ( self . queue ) > 0 : data = deepcopy ( self . queue . queue ) for key , item in data . items ( ) : if 'stderr' in item : del item [ 'stderr' ] if 'stdout' in ite...
Send the daemon status and the current queue for displaying .
3,380
def reset_everything(self, payload):
    """Hard-kill all processes, wait for them to finish, and mark the
    queue for a reset.

    :return: status dict confirming the reset.
    """
    self.process_handler.kill_all(signals['9'], True)
    self.process_handler.wait_for_finish()
    self.reset = True
    return {'message': 'Resetting current queue', 'status': 'success'}
Kill all processes delete the queue and clean everything up .
3,381
def clear(self, payload):
    """Rotate the log, drop finished queue entries, and persist the queue.

    :return: status dict confirming the cleanup.
    """
    self.logger.rotate(self.queue)
    self.queue.clear()
    self.logger.write(self.queue)
    return {'message': 'Finished entries have been removed.',
            'status': 'success'}
Clear queue from any done or failed entries .
3,382
def edit_command ( self , payload ) : key = payload [ 'key' ] command = payload [ 'command' ] if self . queue [ key ] : if self . queue [ key ] [ 'status' ] in [ 'queued' , 'stashed' ] : self . queue [ key ] [ 'command' ] = command answer = { 'message' : 'Command updated' , 'status' : 'error' } else : answer = { 'messa...
Edit the command of a specific entry .
3,383
def stash ( self , payload ) : succeeded = [ ] failed = [ ] for key in payload [ 'keys' ] : if self . queue . get ( key ) is not None : if self . queue [ key ] [ 'status' ] == 'queued' : self . queue [ key ] [ 'status' ] = 'stashed' succeeded . append ( str ( key ) ) else : failed . append ( str ( key ) ) else : failed...
Stash the specified processes .
3,384
def kill_process ( self , payload ) : kill_signal = signals [ payload [ 'signal' ] . lower ( ) ] kill_shell = payload . get ( 'all' , False ) if payload . get ( 'keys' ) : succeeded = [ ] failed = [ ] for key in payload . get ( 'keys' ) : success = self . process_handler . kill_process ( key , kill_signal , kill_shell ...
Pause the daemon and kill all processes or kill a specific process .
3,385
def remove ( self , payload ) : succeeded = [ ] failed = [ ] for key in payload [ 'keys' ] : running = self . process_handler . is_running ( key ) if not running : removed = self . queue . remove ( key ) if removed : succeeded . append ( str ( key ) ) else : failed . append ( str ( key ) ) else : failed . append ( str ...
Remove specified entries from the queue .
3,386
def switch ( self , payload ) : first = payload [ 'first' ] second = payload [ 'second' ] running = self . process_handler . is_running ( first ) or self . process_handler . is_running ( second ) if running : answer = { 'message' : "Can't switch running processes, " "please stop the processes before switching them." , ...
Switch the two specified entry positions in the queue .
3,387
def restart ( self , payload ) : succeeded = [ ] failed = [ ] for key in payload [ 'keys' ] : restarted = self . queue . restart ( key ) if restarted : succeeded . append ( str ( key ) ) else : failed . append ( str ( key ) ) message = '' if len ( succeeded ) > 0 : message += 'Restarted entries: {}.' . format ( ', ' . ...
Restart the specified entries .
3,388
def sendall(self, data, flags=0):
    """Send all of `data`, looping over partial sends. Same as socket.sendall."""
    remaining = len(data)
    while remaining:
        sent = self.send(data, flags)
        data = data[sent:]
        remaining -= sent
Same as socket . sendall
3,389
def do_file_show(client, args):
    """Stream the contents of each requested file to stdout.

    :return: True on completion.
    """
    for uri in args.uris:
        client.download_file(uri, sys.stdout.buffer)
    return True
Output file contents to stdout
3,390
def pub(self, topic, message):
    """POST a single message to `topic` via the HTTP endpoint."""
    params = {'topic': topic}
    return self.post('pub', params=params, data=message)
Publish a message to a topic
3,391
def mpub ( self , topic , messages , binary = True ) : if binary : return self . post ( 'mpub' , data = pack ( messages ) [ 4 : ] , params = { 'topic' : topic , 'binary' : True } ) elif any ( '\n' in m for m in messages ) : raise ClientException ( 'Use `binary` flag in mpub for messages with newlines' ) else : return s...
Send multiple messages to a topic . Optionally pack the messages
3,392
def clean_stats ( self ) : stats = self . stats ( ) if 'topics' in stats : topics = stats [ 'topics' ] topics = dict ( ( t . pop ( 'topic_name' ) , t ) for t in topics ) for topic , data in topics . items ( ) : if 'channels' in data : channels = data [ 'channels' ] channels = dict ( ( c . pop ( 'channel_name' ) , c ) f...
Stats with topics and channels keyed on topic and channel names
3,393
def execute_add(args, root_dir=None):
    """Queue a new shell command on the daemon.

    The command words are joined into one string and sent together with
    the caller's current working directory.
    """
    instruction = {
        'command': ' '.join(args['command']),
        'path': os.getcwd(),
    }
    print_command_factory('add')(instruction, root_dir)
Add a new command to the daemon queue .
3,394
def execute_edit ( args , root_dir = None ) : EDITOR = os . environ . get ( 'EDITOR' , 'vim' ) key = args [ 'key' ] status = command_factory ( 'status' ) ( { } , root_dir = root_dir ) if not isinstance ( status [ 'data' ] , str ) and key in status [ 'data' ] : if status [ 'data' ] [ key ] [ 'status' ] in [ 'queued' , '...
Edit an existing queue command in the daemon.
3,395
def command_factory ( command ) : def communicate ( body = { } , root_dir = None ) : client = connect_socket ( root_dir ) body [ 'mode' ] = command if 'func' in body : del body [ 'func' ] data_string = pickle . dumps ( body , - 1 ) client . send ( data_string ) response = receive_data ( client ) return response return ...
A factory which returns functions for direct daemon communication .
3,396
def get_descriptor ( self , number ) : stdout_path = os . path . join ( self . config_dir , 'pueue_process_{}.stdout' . format ( number ) ) if os . path . exists ( stdout_path ) : os . remove ( stdout_path ) out_descriptor = open ( stdout_path , 'w+' ) stderr_path = os . path . join ( self . config_dir , 'pueue_process...
Create file descriptors for process output .
3,397
def clean_descriptor ( self , number ) : self . descriptors [ number ] [ 'stdout' ] . close ( ) self . descriptors [ number ] [ 'stderr' ] . close ( ) if os . path . exists ( self . descriptors [ number ] [ 'stdout_path' ] ) : os . remove ( self . descriptors [ number ] [ 'stdout_path' ] ) if os . path . exists ( self ...
Close file descriptor and remove underlying files .
3,398
def check_finished ( self ) : changed = False for key in list ( self . processes . keys ( ) ) : process = self . processes [ key ] process . poll ( ) if process . returncode is not None : if key not in self . stopping : output , error_output = process . communicate ( ) descriptor = self . descriptors [ key ] descriptor...
Poll all processes and handle any finished processes .
3,399
def check_for_new(self):
    """Spawn queued entries while there are free process slots."""
    free_slots = self.max_processes - len(self.processes)
    for _ in range(free_slots):
        key = self.queue.next()
        if key is not None:
            self.spawn_new(key)
Check if we can start a new process .