idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
52,800 | def get_all_keys ( self ) : all_keys = [ ] for keys in self . _index . values ( ) : all_keys . extend ( keys ) return all_keys | Get all keys indexed . |
52,801 | def load_from_store ( self ) : if not self . _store : raise AttributeError ( 'No datastore defined!' ) if self . _store . has_blob ( 'all_keys' ) : data = Serializer . deserialize ( self . _store . get_blob ( 'all_keys' ) ) self . load_from_data ( data ) return True elif self . _store . has_blob ( 'all_keys_with_undefined' ) : blob = self . _store . get_blob ( 'all_keys_with_undefined' ) data = Serializer . deserialize ( blob ) self . load_from_data ( data , with_undefined = True ) return True else : return False | Load index from store . |
52,802 | def sort_keys ( self , keys , order = QuerySet . ASCENDING ) : missing_keys = [ key for key in keys if not len ( self . _reverse_index [ key ] ) ] keys_and_values = [ ( key , self . _reverse_index [ key ] [ 0 ] ) for key in keys if key not in missing_keys ] sorted_keys = [ kv [ 0 ] for kv in sorted ( keys_and_values , key = lambda x : x [ 1 ] , reverse = True if order == QuerySet . DESCENDING else False ) ] if order == QuerySet . ASCENDING : return missing_keys + sorted_keys elif order == QuerySet . DESCENDING : return sorted_keys + missing_keys else : raise ValueError ( 'Unexpected order value: {:d}' . format ( order ) ) | Sort keys . |
52,803 | def save_to_data ( self , in_place = False ) : if in_place : return [ list ( self . _index . items ( ) ) , list ( self . _undefined_keys . keys ( ) ) ] return ( [ ( key , values [ : ] ) for key , values in self . _index . items ( ) ] , list ( self . _undefined_keys . keys ( ) ) , ) | Save index to data structure . |
52,804 | def load_from_data ( self , data , with_undefined = False ) : if with_undefined : defined_values , undefined_values = data else : defined_values = data undefined_values = None self . _index = defaultdict ( list , defined_values ) self . _reverse_index = defaultdict ( list ) for key , values in self . _index . items ( ) : for value in values : self . _reverse_index [ value ] . append ( key ) if undefined_values : self . _undefined_keys = { key : True for key in undefined_values } else : self . _undefined_keys = { } | Load index structure . |
52,805 | def get_hash_for ( self , value ) : if isinstance ( value , dict ) and '__ref__' in value : return self . get_hash_for ( value [ '__ref__' ] ) serialized_value = self . _serializer ( value ) if isinstance ( serialized_value , dict ) : return hash ( frozenset ( [ self . get_hash_for ( x ) for x in serialized_value . items ( ) ] ) ) elif isinstance ( serialized_value , ( list , tuple ) ) : return hash ( tuple ( [ self . get_hash_for ( x ) for x in serialized_value ] ) ) return value | Get hash for a given value . |
52,806 | def add_hashed_value ( self , hash_value , store_key ) : if self . _unique and hash_value in self . _index : raise NonUnique ( 'Hash value {} already in index' . format ( hash_value ) ) if store_key not in self . _index [ hash_value ] : self . _index [ hash_value ] . append ( store_key ) if hash_value not in self . _reverse_index [ store_key ] : self . _reverse_index [ store_key ] . append ( hash_value ) | Add hashed value to the index . |
52,807 | def add_key ( self , attributes , store_key ) : undefined = False try : value = self . get_value ( attributes ) except ( KeyError , IndexError ) : undefined = True self . remove_key ( store_key ) if not undefined : if isinstance ( value , ( list , tuple ) ) : values = value hash_value = self . get_hash_for ( value ) self . add_hashed_value ( hash_value , store_key ) else : values = [ value ] for value in values : hash_value = self . get_hash_for ( value ) self . add_hashed_value ( hash_value , store_key ) else : self . add_undefined ( store_key ) | Add key to the index . |
52,808 | def remove_key ( self , store_key ) : if store_key in self . _undefined_keys : del self . _undefined_keys [ store_key ] if store_key in self . _reverse_index : for value in self . _reverse_index [ store_key ] : self . _index [ value ] . remove ( store_key ) del self . _reverse_index [ store_key ] | Remove key from the index . |
52,809 | def _init_cache ( self ) : self . _add_cache = defaultdict ( list ) self . _reverse_add_cache = defaultdict ( list ) self . _undefined_cache = { } self . _remove_cache = { } | Initialize cache . |
52,810 | def commit ( self ) : if ( not self . _add_cache and not self . _remove_cache and not self . _undefined_cache ) : return for store_key , hash_values in self . _add_cache . items ( ) : for hash_value in hash_values : super ( TransactionalIndex , self ) . add_hashed_value ( hash_value , store_key ) for store_key in self . _remove_cache : super ( TransactionalIndex , self ) . remove_key ( store_key ) for store_key in self . _undefined_cache : super ( TransactionalIndex , self ) . add_undefined ( store_key ) if not self . ephemeral : self . save_to_store ( ) self . _init_cache ( ) self . _in_transaction = True | Commit current transaction . |
52,811 | def rollback ( self ) : if not self . _in_transaction : raise NotInTransaction self . _init_cache ( ) self . _in_transaction = False | Drop changes from current transaction . |
52,812 | def add_hashed_value ( self , hash_value , store_key ) : if hash_value not in self . _add_cache [ store_key ] : self . _add_cache [ store_key ] . append ( hash_value ) if store_key not in self . _reverse_add_cache [ hash_value ] : self . _reverse_add_cache [ hash_value ] . append ( store_key ) if store_key in self . _remove_cache : del self . _remove_cache [ store_key ] if store_key in self . _undefined_cache : del self . _undefined_cache [ store_key ] | Add hashed value in the context of the current transaction . |
52,813 | def remove_key ( self , store_key ) : self . _remove_cache [ store_key ] = True if store_key in self . _add_cache : for hash_value in self . _add_cache [ store_key ] : self . _reverse_add_cache [ hash_value ] . remove ( store_key ) del self . _add_cache [ store_key ] if store_key in self . _undefined_cache : del self . _undefined_cache [ store_key ] | Remove key in the context of the current transaction . |
52,814 | def boolean_operator_query ( boolean_operator ) : def _boolean_operator_query ( expressions ) : def _apply_boolean_operator ( query_function , expressions = expressions ) : compiled_expressions = [ compile_query ( e ) for e in expressions ] return reduce ( boolean_operator , [ e ( query_function ) for e in compiled_expressions ] ) return _apply_boolean_operator return _boolean_operator_query | Generate boolean operator checking function . |
52,815 | def filter_query ( key , expression ) : if ( isinstance ( expression , dict ) and len ( expression ) == 1 and list ( expression . keys ( ) ) [ 0 ] . startswith ( '$' ) ) : compiled_expression = compile_query ( expression ) elif callable ( expression ) : def _filter ( index , expression = expression ) : result = [ store_key for value , store_keys in index . get_index ( ) . items ( ) if expression ( value ) for store_key in store_keys ] return result compiled_expression = _filter else : compiled_expression = expression def _get ( query_function , key = key , expression = compiled_expression ) : return query_function ( key , expression ) return _get | Filter documents with a key that satisfies an expression . |
52,816 | def not_query ( expression ) : compiled_expression = compile_query ( expression ) def _not ( index , expression = compiled_expression ) : all_keys = index . get_all_keys ( ) returned_keys = expression ( index ) return [ key for key in all_keys if key not in returned_keys ] return _not | Apply logical not operator to expression . |
52,817 | def comparison_operator_query ( comparison_operator ) : def _comparison_operator_query ( expression ) : def _apply_comparison_operator ( index , expression = expression ) : ev = expression ( ) if callable ( expression ) else expression return [ store_key for value , store_keys in index . get_index ( ) . items ( ) if comparison_operator ( value , ev ) for store_key in store_keys ] return _apply_comparison_operator return _comparison_operator_query | Generate comparison operator checking function . |
52,818 | def exists_query ( expression ) : def _exists ( index , expression = expression ) : ev = expression ( ) if callable ( expression ) else expression if ev : return [ store_key for store_keys in index . get_index ( ) . values ( ) for store_key in store_keys ] else : return index . get_undefined_keys ( ) return _exists | Check that documents have a key that satisfies expression . |
52,819 | def regex_query ( expression ) : def _regex ( index , expression = expression ) : pattern = re . compile ( expression ) return [ store_key for value , store_keys in index . get_index ( ) . items ( ) if ( isinstance ( value , six . string_types ) and re . match ( pattern , value ) ) for store_key in store_keys ] return _regex | Apply regular expression to result of expression . |
52,820 | def all_query ( expression ) : def _all ( index , expression = expression ) : ev = expression ( ) if callable ( expression ) else expression try : iter ( ev ) except TypeError : raise AttributeError ( '$all argument must be an iterable!' ) hashed_ev = [ index . get_hash_for ( v ) for v in ev ] store_keys = set ( ) if len ( hashed_ev ) == 0 : return [ ] store_keys = set ( index . get_keys_for ( hashed_ev [ 0 ] ) ) for value in hashed_ev [ 1 : ] : store_keys &= set ( index . get_keys_for ( value ) ) return list ( store_keys ) return _all | Match arrays that contain all elements in the query . |
52,821 | def in_query ( expression ) : def _in ( index , expression = expression ) : ev = expression ( ) if callable ( expression ) else expression try : iter ( ev ) except TypeError : raise AttributeError ( '$in argument must be an iterable!' ) hashed_ev = [ index . get_hash_for ( v ) for v in ev ] store_keys = set ( ) for value in hashed_ev : store_keys |= set ( index . get_keys_for ( value ) ) return list ( store_keys ) return _in | Match any of the values that exist in an array specified in query . |
52,822 | def compile_query ( query ) : if isinstance ( query , dict ) : expressions = [ ] for key , value in query . items ( ) : if key . startswith ( '$' ) : if key not in query_funcs : raise AttributeError ( 'Invalid operator: {}' . format ( key ) ) expressions . append ( query_funcs [ key ] ( value ) ) else : expressions . append ( filter_query ( key , value ) ) if len ( expressions ) > 1 : return boolean_operator_query ( operator . and_ ) ( expressions ) else : return ( expressions [ 0 ] if len ( expressions ) else lambda query_function : query_function ( None , None ) ) else : return query | Compile each expression in query recursively . |
52,823 | def environment_variable ( key , value ) : if key not in os . environ : previous = None else : previous = os . environ [ key ] os . environ [ key ] = value yield if previous is None : del os . environ [ key ] else : os . environ [ key ] = previous | Temporarily overrides an environment variable . |
52,824 | def host_key_checking ( enable ) : def as_string ( b ) : return b and 'True' or 'False' with environment_variable ( 'ANSIBLE_HOST_KEY_CHECKING' , as_string ( enable ) ) : previous = ansible . constants . HOST_KEY_CHECKING ansible . constants . HOST_KEY_CHECKING = enable yield ansible . constants . HOST_KEY_CHECKING = previous | Temporarily disables host_key_checking which is set globally . |
52,825 | def execute ( self , * args , ** kwargs ) : assert self . is_hooked_up , "the module should be hooked up to the api" if set_global_context : set_global_context ( self . api . options ) if args : self . module_args = module_args = self . get_module_args ( args , kwargs ) else : self . module_args = module_args = kwargs loader = DataLoader ( ) inventory_manager = SourcelessInventoryManager ( loader = loader ) for host , port in self . api . hosts_with_ports : inventory_manager . _inventory . add_host ( host , group = 'all' , port = port ) for key , value in self . api . options . extra_vars . items ( ) : inventory_manager . _inventory . set_variable ( 'all' , key , value ) variable_manager = VariableManager ( loader = loader , inventory = inventory_manager ) play_source = { 'name' : "Suitable Play" , 'hosts' : 'all' , 'gather_facts' : 'no' , 'tasks' : [ { 'action' : { 'module' : self . module_name , 'args' : module_args , } , 'environment' : self . api . environment , } ] } try : play = Play . load ( play_source , variable_manager = variable_manager , loader = loader , ) if self . api . strategy : play . strategy = self . api . strategy log . info ( u'running {}' . format ( u'- {module_name}: {module_args}' . format ( module_name = self . module_name , module_args = module_args ) ) ) start = datetime . utcnow ( ) task_queue_manager = None callback = SilentCallbackModule ( ) verbosity = self . api . options . verbosity == logging . DEBUG and 6 or 0 with ansible_verbosity ( verbosity ) : with host_key_checking ( self . api . host_key_checking ) : kwargs = dict ( inventory = inventory_manager , variable_manager = variable_manager , loader = loader , options = self . api . options , passwords = getattr ( self . api . options , 'passwords' , { } ) , stdout_callback = callback ) if set_global_context : del kwargs [ 'options' ] task_queue_manager = TaskQueueManager ( ** kwargs ) try : task_queue_manager . run ( play ) except SystemExit : if 'pytest' in sys . modules : try : atexit . _run_exitfuncs ( ) except Exception : pass os . kill ( os . getpid ( ) , signal . SIGKILL ) raise finally : if task_queue_manager is not None : task_queue_manager . cleanup ( ) if set_global_context : from ansible . utils . context_objects import GlobalCLIArgs GlobalCLIArgs . _Singleton__instance = None log . debug ( u'took {} to complete' . format ( datetime . utcnow ( ) - start ) ) return self . evaluate_results ( callback ) | Puts args and kwargs in a way ansible can understand . Calls ansible and interprets the result . |
52,826 | def ignore_further_calls_to_server ( self , server ) : log . error ( u'ignoring further calls to {}' . format ( server ) ) self . api . servers . remove ( server ) | Takes a server out of the list . |
52,827 | def evaluate_results ( self , callback ) : for server , result in callback . unreachable . items ( ) : log . error ( u'{} could not be reached' . format ( server ) ) log . debug ( u'ansible-output =>\n{}' . format ( pformat ( result ) ) ) if self . api . ignore_unreachable : continue self . trigger_event ( server , 'on_unreachable_host' , ( self , server ) ) for server , answer in callback . contacted . items ( ) : success = answer [ 'success' ] result = answer [ 'result' ] if result . get ( 'failed' ) : success = False if 'rc' in result : if self . api . is_valid_return_code ( result [ 'rc' ] ) : success = True if not success : log . error ( u'{} failed on {}' . format ( self , server ) ) log . debug ( u'ansible-output =>\n{}' . format ( pformat ( result ) ) ) if self . api . ignore_errors : continue self . trigger_event ( server , 'on_module_error' , ( self , server , result ) ) return RunnerResults ( { 'contacted' : { server : answer [ 'result' ] for server , answer in callback . contacted . items ( ) } } ) | prepare the result of runner call for use with RunnerResults . |
52,828 | def install_strategy_plugins ( directories ) : if isinstance ( directories , str ) : directories = directories . split ( ':' ) for directory in directories : strategy_loader . add_directory ( directory ) | Loads the given strategy plugins which is a list of directories a string with a single directory or a string with multiple directories separated by colon . |
52,829 | def construct_url ( ip_address : str ) -> str : if 'http://' not in ip_address and 'https://' not in ip_address : ip_address = '{}{}' . format ( 'http://' , ip_address ) if ip_address [ - 1 ] == '/' : ip_address = ip_address [ : - 1 ] return ip_address | Construct the URL with a given IP address . |
52,830 | async def update ( self ) -> None : url = '{}{}' . format ( self . url , ENDPOINT ) try : async with self . _session . get ( url ) as response : json_dict = demjson . decode ( await response . text ( ) , strict = False ) except aiohttp . ClientError : json_dict = { 'status' : { 'status1' : SyncThru . OFFLINE } } except demjson . JSONDecodeError : raise ValueError ( "Invalid host, does not support SyncThru." ) self . data = json_dict | Retrieve the data from the printer . Throws ValueError if host does not support SyncThru |
52,831 | def model ( self ) : try : return self . data . get ( 'identity' ) . get ( 'model_name' ) except ( KeyError , AttributeError ) : return self . device_status_simple ( '' ) | Return the model name of the printer . |
52,832 | def location ( self ) : try : return self . data . get ( 'identity' ) . get ( 'location' ) except ( KeyError , AttributeError ) : return self . device_status_simple ( '' ) | Return the location of the printer . |
52,833 | def serial_number ( self ) : try : return self . data . get ( 'identity' ) . get ( 'serial_num' ) except ( KeyError , AttributeError ) : return self . device_status_simple ( '' ) | Return the serial number of the printer . |
52,834 | def hostname ( self ) : try : return self . data . get ( 'identity' ) . get ( 'host_name' ) except ( KeyError , AttributeError ) : return self . device_status_simple ( '' ) | Return the hostname of the printer . |
52,835 | def device_status ( self ) : try : return self . device_status_simple ( self . data . get ( 'status' ) . get ( 'status1' ) ) except ( KeyError , AttributeError ) : return self . device_status_simple ( '' ) | Return the status of the device as string . |
52,836 | def capability ( self ) -> Dict [ str , Any ] : try : return self . data . get ( 'capability' , { } ) except ( KeyError , AttributeError ) : return { } | Return the capabilities of the printer . |
52,837 | def toner_status ( self , filter_supported : bool = True ) -> Dict [ str , Any ] : toner_status = { } for color in self . COLOR_NAMES : try : toner_stat = self . data . get ( '{}_{}' . format ( SyncThru . TONER , color ) , { } ) if filter_supported and toner_stat . get ( 'opt' , 0 ) == 0 : continue else : toner_status [ color ] = toner_stat except ( KeyError , AttributeError ) : toner_status [ color ] = { } return toner_status | Return the state of all toners cartridges . |
52,838 | def input_tray_status ( self , filter_supported : bool = True ) -> Dict [ int , Any ] : tray_status = { } for i in range ( 1 , 5 ) : try : tray_stat = self . data . get ( '{}{}' . format ( SyncThru . TRAY , i ) , { } ) if filter_supported and tray_stat . get ( 'opt' , 0 ) == 0 : continue else : tray_status [ i ] = tray_stat except ( KeyError , AttributeError ) : tray_status [ i ] = { } return tray_status | Return the state of all input trays . |
52,839 | def output_tray_status ( self ) -> Dict [ int , Dict [ str , str ] ] : tray_status = { } try : tray_stat = self . data . get ( 'outputTray' , [ ] ) for i , stat in enumerate ( tray_stat ) : tray_status [ i ] = { 'name' : stat [ 0 ] , 'capacity' : stat [ 1 ] , 'status' : stat [ 2 ] , } except ( KeyError , AttributeError ) : tray_status = { } return tray_status | Return the state of all output trays . |
52,840 | def drum_status ( self , filter_supported : bool = True ) -> Dict [ str , Any ] : drum_status = { } for color in self . COLOR_NAMES : try : drum_stat = self . data . get ( '{}_{}' . format ( SyncThru . DRUM , color ) , { } ) if filter_supported and drum_stat . get ( 'opt' , 0 ) == 0 : continue else : drum_status [ color ] = drum_stat except ( KeyError , AttributeError ) : drum_status [ color ] = { } return drum_status | Return the state of all drums . |
52,841 | def get_caller_stack_info ( start_back : int = 1 ) -> List [ str ] : r callers = [ ] frameinfolist = inspect . stack ( ) frameinfolist = frameinfolist [ start_back : ] for frameinfo in frameinfolist : frame = frameinfo . frame function_defined_at = "... defined at {filename}:{line}" . format ( filename = frame . f_code . co_filename , line = frame . f_code . co_firstlineno , ) argvalues = inspect . getargvalues ( frame ) formatted_argvalues = inspect . formatargvalues ( * argvalues ) function_call = "{funcname}{argvals}" . format ( funcname = frame . f_code . co_name , argvals = formatted_argvalues , ) code_context = frameinfo . code_context code = "" . join ( code_context ) if code_context else "" onwards = "... line {line} calls next in stack; code is:\n{c}" . format ( line = frame . f_lineno , c = code , ) description = "\n" . join ( [ function_call , function_defined_at , onwards ] ) callers . append ( description ) return list ( reversed ( callers ) ) | r Retrieves a textual representation of the call stack . |
52,842 | def cxxRecordDecl ( * args ) : kinds = [ CursorKind . CLASS_DECL , CursorKind . CLASS_TEMPLATE , ] inner = [ PredMatcher ( is_kind ( k ) ) for k in kinds ] return allOf ( anyOf ( * inner ) , * args ) | Matches C ++ class declarations . |
52,843 | def recordDecl ( * args ) : kinds = [ CursorKind . STRUCT_DECL , CursorKind . UNION_DECL , CursorKind . CLASS_DECL , CursorKind . CLASS_TEMPLATE , ] inner = [ PredMatcher ( is_kind ( k ) ) for k in kinds ] return allOf ( anyOf ( * inner ) , * args ) | Matches class struct and union declarations . |
52,844 | def convert_to_int ( x : Any , default : int = None ) -> int : try : return int ( x ) except ( TypeError , ValueError ) : return default | Transforms its input into an integer or returns default . |
52,845 | def convert_attrs_to_uppercase ( obj : Any , attrs : Iterable [ str ] ) -> None : for a in attrs : value = getattr ( obj , a ) if value is None : continue setattr ( obj , a , value . upper ( ) ) | Converts the specified attributes of an object to upper case modifying the object in place . |
52,846 | def convert_attrs_to_lowercase ( obj : Any , attrs : Iterable [ str ] ) -> None : for a in attrs : value = getattr ( obj , a ) if value is None : continue setattr ( obj , a , value . lower ( ) ) | Converts the specified attributes of an object to lower case modifying the object in place . |
52,847 | def load ( klass ) : config = klass ( ) for path in klass . CONF_PATHS : if os . path . exists ( path ) : with open ( path , 'r' ) as conf : config . configure ( yaml . safe_load ( conf ) ) return config | Insantiates the configuration by attempting to load the configuration from YAML files specified by the CONF_PATH module variable . This should be the main entry point for configuration . |
52,848 | def configure ( self , conf = { } ) : if not conf : return if isinstance ( conf , Configuration ) : conf = dict ( conf . options ( ) ) for key , value in conf . items ( ) : opt = self . get ( key , None ) if isinstance ( opt , Configuration ) : opt . configure ( value ) else : setattr ( self , key , value ) | Allows updating of the configuration via a dictionary of configuration terms or a configuration object . Generally speaking this method is utilized to configure the object from a JSON or YAML parsing . |
52,849 | def options ( self ) : keys = self . __class__ . __dict__ . copy ( ) keys . update ( self . __dict__ ) keys = sorted ( keys . keys ( ) ) for opt in keys : val = self . get ( opt ) if val is not None : yield opt , val | Returns an iterable of sorted option names in order to loop through all the configuration directives specified in the class . |
52,850 | def overlaps ( self , other : "Interval" ) -> bool : return not ( self . end <= other . start or self . start >= other . end ) | Does this interval overlap the other? |
52,851 | def contiguous ( self , other : "Interval" ) -> bool : return not ( self . end < other . start or self . start > other . end ) | Does this interval overlap or touch the other? |
52,852 | def contains ( self , time : datetime . datetime , inclusive : bool = True ) -> bool : if inclusive : return self . start <= time <= self . end else : return self . start < time < self . end | Does the interval contain a momentary time? |
52,853 | def within ( self , other : "Interval" , inclusive : bool = True ) -> bool : if not other : return False if inclusive : return self . start >= other . start and self . end <= other . end else : return self . start > other . start and self . end < other . end | Is this interval contained within the other? |
52,854 | def component_on_date ( self , date : datetime . date ) -> Optional [ "Interval" ] : return self . intersection ( Interval . wholeday ( date ) ) | Returns the part of this interval that falls on the date given or None if the interval doesn t have any part during that date . |
52,855 | def n_weekends ( self ) -> int : startdate = self . start . date ( ) enddate = self . end . date ( ) ndays = ( enddate - startdate ) . days + 1 in_weekend = False n_weekends = 0 for i in range ( ndays ) : date = startdate + datetime . timedelta ( days = i ) if not in_weekend and is_weekend ( date ) : in_weekend = True n_weekends += 1 elif in_weekend and not is_weekend ( date ) : in_weekend = False return n_weekends | Returns the number of weekends that this interval covers . Includes partial weekends . |
52,856 | def add ( self , interval : Interval ) -> None : if interval is None : return if not isinstance ( interval , Interval ) : raise TypeError ( "Attempt to insert non-Interval into IntervalList" ) self . intervals . append ( interval ) self . _tidy ( ) | Adds an interval to the list . If self . no_overlap is True as is the default it will merge any overlapping intervals thus created . |
52,857 | def _tidy ( self ) -> None : if self . no_overlap : self . remove_overlap ( self . no_contiguous ) else : self . _sort ( ) | Removes overlaps etc . and sorts . |
52,858 | def remove_overlap ( self , also_remove_contiguous : bool = False ) -> None : overlap = True while overlap : overlap = self . _remove_overlap_sub ( also_remove_contiguous ) self . _sort ( ) | Merges any overlapping intervals . |
52,859 | def _any_overlap_or_contiguous ( self , test_overlap : bool ) -> bool : for i in range ( len ( self . intervals ) ) : for j in range ( i + 1 , len ( self . intervals ) ) : first = self . intervals [ i ] second = self . intervals [ j ] if test_overlap : test = first . overlaps ( second ) else : test = first . contiguous ( second ) if test : return True return False | Do any of the intervals overlap? |
52,860 | def total_duration ( self ) -> datetime . timedelta : total = datetime . timedelta ( ) for interval in self . intervals : total += interval . duration ( ) return total | Returns a datetime . timedelta object with the total sum of durations . If there is overlap time will be double - counted so beware! |
52,861 | def durations ( self ) -> List [ datetime . timedelta ] : return [ x . duration ( ) for x in self . intervals ] | Returns a list of datetime . timedelta objects representing the durations of each interval in our list . |
52,862 | def longest_duration ( self ) -> Optional [ datetime . timedelta ] : if not self . intervals : return None return max ( self . durations ( ) ) | Returns the duration of the longest interval or None if none . |
52,863 | def longest_interval ( self ) -> Optional [ Interval ] : longest_duration = self . longest_duration ( ) for i in self . intervals : if i . duration ( ) == longest_duration : return i return None | Returns the longest interval or None if none . |
52,864 | def first_interval_starting ( self , start : datetime . datetime ) -> Optional [ Interval ] : for i in self . intervals : if i . start == start : return i return None | Returns our first interval that starts with the start parameter or None . |
52,865 | def first_interval_ending ( self , end : datetime . datetime ) -> Optional [ Interval ] : for i in self . intervals : if i . end == end : return i return None | Returns our first interval that ends with the end parameter or None . |
52,866 | def subset ( self , interval : Interval , flexibility : int = 2 ) -> "IntervalList" : if flexibility not in [ 0 , 1 , 2 ] : raise ValueError ( "subset: bad flexibility value" ) permitted = [ ] for i in self . intervals : if flexibility == 0 : ok = i . start > interval . start and i . end < interval . end elif flexibility == 1 : ok = i . end > interval . start and i . start < interval . end else : ok = i . end >= interval . start and i . start <= interval . end if ok : permitted . append ( i ) return IntervalList ( permitted ) | Returns an IntervalList that s a subset of this one only containing intervals that meet the interval parameter criterion . What meet means is defined by the flexibility parameter . |
52,867 | def max_consecutive_days ( self ) -> Optional [ Tuple [ int , Interval ] ] : if len ( self . intervals ) == 0 : return None startdate = self . start_date ( ) enddate = self . end_date ( ) seq = '' ndays = ( enddate - startdate ) . days + 1 for i in range ( ndays ) : date = startdate + datetime . timedelta ( days = i ) wholeday = Interval . wholeday ( date ) if any ( [ x . overlaps ( wholeday ) for x in self . intervals ] ) : seq += '+' else : seq += ' ' longest = max ( seq . split ( ) , key = len ) longest_len = len ( longest ) longest_idx = seq . index ( longest ) longest_interval = Interval . dayspan ( startdate + datetime . timedelta ( days = longest_idx ) , startdate + datetime . timedelta ( days = longest_idx + longest_len ) ) return longest_len , longest_interval | The length of the longest sequence of days in which all days include an interval . |
52,868 | def cumulative_time_to ( self , when : datetime . datetime ) -> datetime . timedelta : assert self . no_overlap , self . _ONLY_FOR_NO_INTERVAL cumulative = datetime . timedelta ( ) for interval in self . intervals : if interval . start >= when : break elif interval . end <= when : cumulative += interval . duration ( ) else : cumulative += when - interval . start return cumulative | Returns the cumulative time contained in our intervals up to the specified time point . |
52,869 | def cumulative_gaps_to ( self , when : datetime . datetime ) -> datetime . timedelta : gaps = self . gaps ( ) return gaps . cumulative_time_to ( when ) | Return the cumulative time within our gaps up to when . |
52,870 | def time_afterwards_preceding ( self , when : datetime . datetime ) -> Optional [ datetime . timedelta ] : if self . is_empty ( ) : return None end_time = self . end_datetime ( ) if when <= end_time : return datetime . timedelta ( ) else : return when - end_time | Returns the time after our last interval but before when . If self is an empty list returns None . |
52,871 | def repr_parameter ( param : inspect . Parameter ) -> str : return ( "Parameter(name={name}, annotation={annotation}, kind={kind}, " "default={default}" . format ( name = param . name , annotation = param . annotation , kind = param . kind , default = param . default ) ) | Provides a repr - style representation of a function parameter . |
52,872 | def gpg_version ( ) : cmd = flatten ( [ gnupg_bin ( ) , "--version" ] ) output = stderr_output ( cmd ) output = output . split ( '\n' ) [ 0 ] . split ( " " ) [ 2 ] . split ( '.' ) return tuple ( [ int ( x ) for x in output ] ) | Returns the GPG version |
52,873 | def passphrase_file ( passphrase = None ) : cmd = [ ] pass_file = None if not passphrase and 'CRYPTORITO_PASSPHRASE_FILE' in os . environ : pass_file = os . environ [ 'CRYPTORITO_PASSPHRASE_FILE' ] if not os . path . isfile ( pass_file ) : raise CryptoritoError ( 'CRYPTORITO_PASSPHRASE_FILE is invalid' ) elif passphrase : tmpdir = ensure_tmpdir ( ) pass_file = "%s/p_pass" % tmpdir p_handle = open ( pass_file , 'w' ) p_handle . write ( passphrase ) p_handle . close ( ) if pass_file : cmd = cmd + [ "--batch" , "--passphrase-file" , pass_file ] vsn = gpg_version ( ) if vsn [ 0 ] >= 2 and vsn [ 1 ] >= 1 : cmd = cmd + [ "--pinentry-mode" , "loopback" ] return cmd | Read passphrase from a file . This should only ever be used by our built in integration tests . At this time during normal operation only pinentry is supported for entry of passwords . |
52,874 | def gnupg_home ( ) : if 'GNUPGHOME' in os . environ : gnupghome = os . environ [ 'GNUPGHOME' ] if not os . path . isdir ( gnupghome ) : raise CryptoritoError ( "Invalid GNUPGHOME directory" ) return [ "--homedir" , gnupghome ] else : return [ ] | Returns appropriate arguments if GNUPGHOME is set |
52,875 | def fingerprint_from_keybase ( fingerprint , kb_obj ) : if 'public_keys' in kb_obj and 'pgp_public_keys' in kb_obj [ 'public_keys' ] : for key in kb_obj [ 'public_keys' ] [ 'pgp_public_keys' ] : keyprint = fingerprint_from_var ( key ) . lower ( ) fingerprint = fingerprint . lower ( ) if fingerprint == keyprint or keyprint . startswith ( fingerprint ) or keyprint . endswith ( fingerprint ) : return { 'fingerprint' : keyprint , 'bundle' : key } return None | Extracts a key matching a specific fingerprint from a Keybase API response |
52,876 | def key_from_keybase ( username , fingerprint = None ) : url = keybase_lookup_url ( username ) resp = requests . get ( url ) if resp . status_code == 200 : j_resp = json . loads ( polite_string ( resp . content ) ) if 'them' in j_resp and len ( j_resp [ 'them' ] ) == 1 : kb_obj = j_resp [ 'them' ] [ 0 ] if fingerprint : return fingerprint_from_keybase ( fingerprint , kb_obj ) else : if 'public_keys' in kb_obj and 'pgp_public_keys' in kb_obj [ 'public_keys' ] : key = kb_obj [ 'public_keys' ] [ 'primary' ] return massage_key ( key ) return None | Look up a public key from a username |
52,877 | def has_gpg_key ( fingerprint ) : if len ( fingerprint ) > 8 : fingerprint = fingerprint [ - 8 : ] fingerprint = fingerprint . upper ( ) cmd = flatten ( [ gnupg_bin ( ) , gnupg_home ( ) , "--list-public-keys" ] ) lines = stderr_output ( cmd ) . split ( '\n' ) return len ( [ key for key in lines if key . find ( fingerprint ) > - 1 ] ) == 1 | Checks to see if we have this gpg fingerprint |
52,878 | def fingerprint_from_var ( var ) : vsn = gpg_version ( ) cmd = flatten ( [ gnupg_bin ( ) , gnupg_home ( ) ] ) if vsn [ 0 ] >= 2 and vsn [ 1 ] < 1 : cmd . append ( "--with-fingerprint" ) output = polite_string ( stderr_with_input ( cmd , var ) ) . split ( '\n' ) if not output [ 0 ] . startswith ( 'pub' ) : raise CryptoritoError ( 'probably an invalid gpg key' ) if vsn [ 0 ] >= 2 and vsn [ 1 ] < 1 : return output [ 1 ] . split ( '=' ) [ 1 ] . replace ( ' ' , '' ) return output [ 1 ] . strip ( ) | Extract a fingerprint from a GPG public key |
52,879 | def fingerprint_from_file ( filename ) : cmd = flatten ( [ gnupg_bin ( ) , gnupg_home ( ) , filename ] ) outp = stderr_output ( cmd ) . split ( '\n' ) if not outp [ 0 ] . startswith ( 'pub' ) : raise CryptoritoError ( 'probably an invalid gpg key' ) return outp [ 1 ] . strip ( ) | Extract a fingerprint from a GPG public key file |
52,880 | def stderr_output ( cmd ) : handle , gpg_stderr = stderr_handle ( ) try : output = subprocess . check_output ( cmd , stderr = gpg_stderr ) if handle : handle . close ( ) return str ( polite_string ( output ) ) except subprocess . CalledProcessError as exception : LOGGER . debug ( "GPG Command %s" , ' ' . join ( exception . cmd ) ) LOGGER . debug ( "GPG Output %s" , exception . output ) raise CryptoritoError ( 'GPG Execution' ) | Wraps the execution of check_output in a way that ignores stderr when not in debug mode |
52,881 | def stderr_with_input ( cmd , stdin ) : handle , gpg_stderr = stderr_handle ( ) LOGGER . debug ( "GPG command %s" , ' ' . join ( cmd ) ) try : gpg_proc = subprocess . Popen ( cmd , stdout = subprocess . PIPE , stdin = subprocess . PIPE , stderr = gpg_stderr ) output , _err = gpg_proc . communicate ( polite_bytes ( stdin ) ) if handle : handle . close ( ) return output except subprocess . CalledProcessError as exception : return gpg_error ( exception , 'GPG variable encryption error' ) except OSError as exception : raise CryptoritoError ( "File %s not found" % exception . filename ) | Runs a command passing something in stdin and returning whatever came out from stdout |
52,882 | def import_gpg_key ( key ) : if not key : raise CryptoritoError ( 'Invalid GPG Key' ) key_fd , key_filename = mkstemp ( "cryptorito-gpg-import" ) key_handle = os . fdopen ( key_fd , 'w' ) key_handle . write ( polite_string ( key ) ) key_handle . close ( ) cmd = flatten ( [ gnupg_bin ( ) , gnupg_home ( ) , "--import" , key_filename ] ) output = stderr_output ( cmd ) msg = 'gpg: Total number processed: 1' output_bits = polite_string ( output ) . split ( '\n' ) return len ( [ line for line in output_bits if line == msg ] ) == 1 | Imports a GPG key |
52,883 | def export_gpg_key ( key ) : cmd = flatten ( [ gnupg_bin ( ) , gnupg_verbose ( ) , gnupg_home ( ) , "--export" , key ] ) handle , gpg_stderr = stderr_handle ( ) try : gpg_proc = subprocess . Popen ( cmd , stdout = subprocess . PIPE , stderr = gpg_stderr ) output , _err = gpg_proc . communicate ( ) if handle : handle . close ( ) return portable_b64encode ( output ) except subprocess . CalledProcessError as exception : LOGGER . debug ( "GPG Command %s" , ' ' . join ( exception . cmd ) ) LOGGER . debug ( "GPG Output %s" , exception . output ) raise CryptoritoError ( 'GPG encryption error' ) | Exports a GPG key and returns it |
52,884 | def encrypt ( source , dest , keys ) : cmd = flatten ( [ gnupg_bin ( ) , "--armor" , "--output" , dest , gnupg_verbose ( ) , gnupg_home ( ) , recipients_args ( keys ) , "--encrypt" , source ] ) stderr_output ( cmd ) return True | Encrypts a file using the given keys |
52,885 | def encrypt_var ( source , keys ) : cmd = flatten ( [ gnupg_bin ( ) , "--armor" , "--encrypt" , gnupg_verbose ( ) , recipients_args ( keys ) ] ) output = stderr_with_input ( cmd , source ) return output | Attempts to encrypt a variable |
52,886 | def gpg_error ( exception , message ) : LOGGER . debug ( "GPG Command %s" , ' ' . join ( [ str ( x ) for x in exception . cmd ] ) ) LOGGER . debug ( "GPG Output %s" , exception . output ) raise CryptoritoError ( message ) | Handles the output of subprocess errors in a way that is compatible with the log level |
52,887 | def decrypt_var ( source , passphrase = None ) : cmd = [ gnupg_bin ( ) , "--decrypt" , gnupg_home ( ) , gnupg_verbose ( ) , passphrase_file ( passphrase ) ] return stderr_with_input ( flatten ( cmd ) , source ) | Attempts to decrypt a variable |
52,888 | def decrypt ( source , dest = None , passphrase = None ) : if not os . path . exists ( source ) : raise CryptoritoError ( "Encrypted file %s not found" % source ) cmd = [ gnupg_bin ( ) , gnupg_verbose ( ) , "--decrypt" , gnupg_home ( ) , passphrase_file ( passphrase ) ] if dest : cmd . append ( [ "--output" , dest ] ) cmd . append ( [ source ] ) stderr_output ( flatten ( cmd ) ) return True | Attempts to decrypt a file |
52,889 | def is_base64 ( string ) : return ( not re . match ( '^[0-9]+$' , string ) ) and ( len ( string ) % 4 == 0 ) and re . match ( '^[A-Za-z0-9+/]+[=]{0,2}$' , string ) | Determines whether or not a string is likely to be base64 encoded binary nonsense |
52,890 | def portable_b64encode ( thing ) : if is_py3 ( ) : try : some_bits = bytes ( thing , 'utf-8' ) except TypeError : some_bits = thing return polite_string ( b64encode ( some_bits ) . decode ( 'utf-8' ) ) return polite_string ( b64encode ( thing ) ) | Wrap b64encode for Python 2 & 3 |
52,891 | def download_if_not_exists ( url : str , filename : str , skip_cert_verify : bool = True , mkdir : bool = True ) -> None : if os . path . isfile ( filename ) : log . info ( "No need to download, already have: {}" , filename ) return if mkdir : directory , basename = os . path . split ( os . path . abspath ( filename ) ) mkdir_p ( directory ) download ( url = url , filename = filename , skip_cert_verify = skip_cert_verify ) | Downloads a URL to a file unless the file already exists . |
52,892 | def git_clone ( prettyname : str , url : str , directory : str , branch : str = None , commit : str = None , clone_options : List [ str ] = None , run_func : Callable [ [ List [ str ] ] , Any ] = None ) -> bool : run_func = run_func or subprocess . check_call clone_options = clone_options or [ ] if os . path . isdir ( directory ) : log . info ( "Not re-cloning {} Git repository: using existing source " "in {}" . format ( prettyname , directory ) ) return False log . info ( "Fetching {} source from {} into {}" , prettyname , url , directory ) require_executable ( GIT ) gitargs = [ GIT , "clone" ] + clone_options if branch : gitargs += [ "--branch" , branch ] gitargs += [ url , directory ] run_func ( gitargs ) if commit : log . info ( "Resetting {} local Git repository to commit {}" , prettyname , commit ) run_func ( [ GIT , "-C" , directory , "reset" , "--hard" , commit ] ) return True | Fetches a Git repository unless we have it already . |
52,893 | def untar_to_directory ( tarfile : str , directory : str , verbose : bool = False , gzipped : bool = False , skip_if_dir_exists : bool = True , run_func : Callable [ [ List [ str ] ] , Any ] = None , chdir_via_python : bool = True ) -> None : if skip_if_dir_exists and os . path . isdir ( directory ) : log . info ( "Skipping extraction of {} as directory {} exists" , tarfile , directory ) return log . info ( "Extracting {} -> {}" , tarfile , directory ) require_executable ( TAR ) mkdir_p ( directory ) args = [ TAR , "-x" ] if verbose : args . append ( "-v" ) if gzipped : args . append ( "-z" ) if platform . system ( ) != "Darwin" : args . append ( "--force-local" ) args . extend ( [ "-f" , tarfile ] ) if chdir_via_python : with pushd ( directory ) : run_func ( args ) else : args . extend ( [ "-C" , directory ] ) run_func ( args ) | Unpacks a TAR file into a specified directory . |
52,894 | def run ( args : List [ str ] , env : Dict [ str , str ] = None , capture_stdout : bool = False , echo_stdout : bool = True , capture_stderr : bool = False , echo_stderr : bool = True , debug_show_env : bool = True , encoding : str = sys . getdefaultencoding ( ) , allow_failure : bool = False , ** kwargs ) -> Tuple [ str , str ] : cwd = os . getcwd ( ) copy_paste_cmd = subprocess . list2cmdline ( args ) csep = "=" * 79 esep = "-" * 79 effective_env = env or os . environ if debug_show_env : log . debug ( "Environment for the command that follows:\n" "{esep}\n" "{env}\n" "{esep}" . format ( esep = esep , env = make_copy_paste_env ( effective_env ) ) ) log . info ( "Launching external command:\n" "{csep}\n" "WORKING DIRECTORY: {cwd}\n" "PYTHON ARGS: {args!r}\n" "COMMAND: {cmd}\n" "{csep}" . format ( csep = csep , cwd = cwd , cmd = copy_paste_cmd , args = args ) ) try : with io . StringIO ( ) as out , io . StringIO ( ) as err : stdout_targets = [ ] stderr_targets = [ ] if capture_stdout : stdout_targets . append ( out ) if echo_stdout : stdout_targets . append ( sys . stdout ) if capture_stderr : stderr_targets . append ( err ) if echo_stderr : stderr_targets . append ( sys . stderr ) retcode = teed_call ( args , stdout_targets = stdout_targets , stderr_targets = stderr_targets , encoding = encoding , env = env , ** kwargs ) stdout = out . getvalue ( ) stderr = err . getvalue ( ) if retcode != 0 and not allow_failure : raise subprocess . CalledProcessError ( returncode = retcode , cmd = args , output = stdout , stderr = stderr ) log . debug ( "\n{csep}\nFINISHED SUCCESSFULLY: {cmd}\n{csep}" , cmd = copy_paste_cmd , csep = csep ) return stdout , stderr except FileNotFoundError : require_executable ( args [ 0 ] ) raise except subprocess . CalledProcessError : log . critical ( "Command that failed:\n" "[ENVIRONMENT]\n" "{env}\n" "\n" "[DIRECTORY] {cwd}\n" "[PYTHON ARGS] {args}\n" "[COMMAND] {cmd}" . format ( cwd = cwd , env = make_copy_paste_env ( effective_env ) , cmd = copy_paste_cmd , args = args ) ) raise | Runs an external process announcing it . |
52,895 | def fetch ( args : List [ str ] , env : Dict [ str , str ] = None , encoding : str = sys . getdefaultencoding ( ) ) -> str : stdout , _ = run ( args , env = env , capture_stdout = True , echo_stdout = False , encoding = encoding ) log . debug ( stdout ) return stdout | Run a command and returns its stdout . |
52,896 | def dump_connection_info ( engine : Engine , fileobj : TextIO = sys . stdout ) -> None : meta = MetaData ( bind = engine ) writeline_nl ( fileobj , sql_comment ( 'Database info: {}' . format ( meta ) ) ) | Dumps some connection info as an SQL comment . Obscures passwords . |
52,897 | def dump_ddl ( metadata : MetaData , dialect_name : str , fileobj : TextIO = sys . stdout , checkfirst : bool = True ) -> None : def dump ( querysql , * multiparams , ** params ) : compsql = querysql . compile ( dialect = engine . dialect ) writeline_nl ( fileobj , "{sql};" . format ( sql = compsql ) ) writeline_nl ( fileobj , sql_comment ( "Schema (for dialect {}):" . format ( dialect_name ) ) ) engine = create_engine ( '{dialect}://' . format ( dialect = dialect_name ) , strategy = 'mock' , executor = dump ) metadata . create_all ( engine , checkfirst = checkfirst ) | Sends schema - creating DDL from the metadata to the dump engine . This makes CREATE TABLE statements . |
52,898 | def dump_table_as_insert_sql ( engine : Engine , table_name : str , fileobj : TextIO , wheredict : Dict [ str , Any ] = None , include_ddl : bool = False , multirow : bool = False ) -> None : log . info ( "dump_data_as_insert_sql: table_name={}" , table_name ) writelines_nl ( fileobj , [ SEP1 , sql_comment ( "Data for table: {}" . format ( table_name ) ) , SEP2 , sql_comment ( "Filters: {}" . format ( wheredict ) ) , ] ) dialect = engine . dialect if not dialect . supports_multivalues_insert : multirow = False if multirow : log . warning ( "dump_data_as_insert_sql: multirow parameter substitution " "not working yet" ) multirow = False meta = MetaData ( bind = engine ) log . debug ( "... retrieving schema" ) table = Table ( table_name , meta , autoload = True ) if include_ddl : log . debug ( "... producing DDL" ) dump_ddl ( table . metadata , dialect_name = engine . dialect . name , fileobj = fileobj ) log . debug ( "... fetching records" ) query = select ( table . columns ) if wheredict : for k , v in wheredict . items ( ) : col = table . columns . get ( k ) query = query . where ( col == v ) cursor = engine . execute ( query ) if multirow : row_dict_list = [ ] for r in cursor : row_dict_list . append ( dict ( r ) ) if row_dict_list : statement = table . insert ( ) . values ( row_dict_list ) insert_str = get_literal_query ( statement , bind = engine ) writeline_nl ( fileobj , insert_str ) else : writeline_nl ( fileobj , sql_comment ( "No data!" ) ) else : found_one = False for r in cursor : found_one = True row_dict = dict ( r ) statement = table . insert ( values = row_dict ) insert_str = get_literal_query ( statement , bind = engine ) writeline_nl ( fileobj , insert_str ) if not found_one : writeline_nl ( fileobj , sql_comment ( "No data!" ) ) writeline_nl ( fileobj , SEP2 ) log . debug ( "... done" ) | Reads a table from the database and writes SQL to replicate the table s data to the output fileobj . |
52,899 | def dump_database_as_insert_sql ( engine : Engine , fileobj : TextIO = sys . stdout , include_ddl : bool = False , multirow : bool = False ) -> None : for tablename in get_table_names ( engine ) : dump_table_as_insert_sql ( engine = engine , table_name = tablename , fileobj = fileobj , include_ddl = include_ddl , multirow = multirow ) | Reads an entire database and writes SQL to replicate it to the output file - like object . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.