idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
5,700 | def assert_unordered_list_eq ( expected , actual , message = None ) : missing_in_actual = [ ] missing_in_expected = list ( actual ) for x in expected : try : missing_in_expected . remove ( x ) except ValueError : missing_in_actual . append ( x ) if missing_in_actual or missing_in_expected : if not message : message = (... | Raises an AssertionError if the objects contained in expected are not equal to the objects contained in actual without regard to their order . |
5,701 | def _execute_if_not_empty ( func ) : def wrapper ( * args , ** kwargs ) : if any ( args [ 1 : ] ) or any ( kwargs . items ( ) ) : return func ( * args , ** kwargs ) return wrapper | Execute function only if one of input parameters is not empty |
5,702 | def prepare_search_body ( self , should_terms = None , must_terms = None , must_not_terms = None , search_text = '' , start = None , end = None ) : self . body = self . SearchBody ( ) self . body . set_should_terms ( should_terms ) self . body . set_must_terms ( must_terms ) self . body . set_must_not_terms ( must_not_... | Prepare body for elasticsearch query |
5,703 | def execute ( cls , instance , async = True , countdown = 2 , is_heavy_task = False , ** kwargs ) : cls . pre_apply ( instance , async = async , ** kwargs ) result = cls . apply_signature ( instance , async = async , countdown = countdown , is_heavy_task = is_heavy_task , ** kwargs ) cls . post_apply ( instance , async... | Execute high - level operation |
5,704 | def apply_signature ( cls , instance , async = True , countdown = None , is_heavy_task = False , ** kwargs ) : serialized_instance = utils . serialize_instance ( instance ) signature = cls . get_task_signature ( instance , serialized_instance , ** kwargs ) link = cls . get_success_signature ( instance , serialized_inst... | Serialize input data and apply signature |
5,705 | def _apply_callback ( cls , callback , result ) : if not callback . immutable : callback . args = ( result . id , ) + callback . args callback . apply ( ) | Synchronously execute callback |
5,706 | def is_disabled_action ( view ) : if not isinstance ( view , core_views . ActionsViewSet ) : return False action = getattr ( view , 'action' , None ) return action in view . disabled_actions if action is not None else False | Checks whether Link action is disabled . |
5,707 | def get_allowed_methods ( self , callback ) : if hasattr ( callback , 'actions' ) : return [ method . upper ( ) for method in callback . actions . keys ( ) if method != 'head' ] return [ method for method in callback . cls ( ) . allowed_methods if method not in ( 'OPTIONS' , 'HEAD' ) ] | Return a list of the valid HTTP methods for this endpoint . |
5,708 | def create_view ( self , callback , method , request = None ) : view = super ( WaldurSchemaGenerator , self ) . create_view ( callback , method , request ) if is_disabled_action ( view ) : view . exclude_from_schema = True return view | Given a callback return an actual view instance . |
5,709 | def get_description ( self , path , method , view ) : description = super ( WaldurSchemaGenerator , self ) . get_description ( path , method , view ) permissions_description = get_permissions_description ( view , method ) if permissions_description : description += '\n\n' + permissions_description if description else p... | Determine a link description . |
5,710 | def delete_error_message ( sender , instance , name , source , target , ** kwargs ) : if source != StateMixin . States . ERRED : return instance . error_message = '' instance . save ( update_fields = [ 'error_message' ] ) | Delete error message if instance state changed from erred |
5,711 | def _make_value ( self , value ) : member = self . __new__ ( self , value ) member . __init__ ( value ) return member | Instantiates an enum with an arbitrary value . |
5,712 | def parse ( cls , value , default = _no_default ) : if isinstance ( value , cls ) : return value elif isinstance ( value , six . integer_types ) and not isinstance ( value , EnumBase ) : e = cls . _value_to_member . get ( value , _no_default ) else : e = cls . _name_to_member . get ( value , _no_default ) if e is _no_d... | Parses an enum member name or value into an enum member . |
5,713 | def parse ( cls , value , default = _no_default ) : if isinstance ( value , cls ) : return value elif isinstance ( value , int ) : e = cls . _make_value ( value ) else : if not value : e = cls . _make_value ( 0 ) else : r = 0 for k in value . split ( "," ) : v = cls . _name_to_member . get ( k , _no_default ) if v is _... | Parses a flag integer or string into a Flags instance . |
5,714 | def get_permission_checks ( self , request , view ) : if view . action is None : return [ ] if hasattr ( view , view . action + '_permissions' ) : return getattr ( view , view . action + '_permissions' ) extra_permissions = getattr ( view , view . action + 'extra_permissions' , [ ] ) if request . method in SAFE_METHODS... | Get permission checks that will be executed for current action . |
5,715 | def add_function ( self , function ) : if not len ( self . settings . FUNCTION_MANAGERS ) : raise ConfigurationError ( 'Where have the default function manager gone?!' ) self . settings . FUNCTION_MANAGERS [ 0 ] . add_function ( function ) | Registers the function to the server s default fixed function manager . |
5,716 | def format_raw_field ( key ) : subfield = django_settings . WALDUR_CORE . get ( 'ELASTICSEARCH' , { } ) . get ( 'raw_subfield' , 'keyword' ) return '%s.%s' % ( camel_case_to_underscore ( key ) , subfield ) | When ElasticSearch analyzes a string it breaks it into parts . In order to make a query for not - analyzed exact string values we should use a subfield instead . |
5,717 | def decorate ( decorator_cls , * args , ** kwargs ) : global _wrappers wrapper_cls = _wrappers . get ( decorator_cls , None ) if wrapper_cls is None : class PythonWrapper ( decorator_cls ) : pass wrapper_cls = PythonWrapper wrapper_cls . __name__ = decorator_cls . __name__ + "PythonWrapper" _wrappers [ decorator_cls ] ... | Creates a decorator function that applies the decorator_cls that was passed in . |
5,718 | def deprecated ( replacement_description ) : def decorate ( fn_or_class ) : if isinstance ( fn_or_class , type ) : pass else : try : fn_or_class . __doc__ = "This API point is obsolete. %s\n\n%s" % ( replacement_description , fn_or_class . __doc__ , ) except AttributeError : pass return fn_or_class return decorate | States that method is deprecated . |
5,719 | def convert_result ( converter ) : def decorate ( fn ) : @ inspection . wraps ( fn ) def new_fn ( * args , ** kwargs ) : return converter ( fn ( * args , ** kwargs ) ) return new_fn return decorate | Decorator that can convert the result of a function call . |
5,720 | def retry ( exception_cls , max_tries = 10 , sleep = 0.05 ) : assert max_tries > 0 def with_max_retries_call ( delegate ) : for i in xrange ( 0 , max_tries ) : try : return delegate ( ) except exception_cls : if i + 1 == max_tries : raise time . sleep ( sleep ) def outer ( fn ) : is_generator = inspect . isgeneratorfun... | Decorator for retrying a function if it throws an exception . |
5,721 | def decorator_of_context_manager ( ctxt ) : def decorator_fn ( * outer_args , ** outer_kwargs ) : def decorator ( fn ) : @ functools . wraps ( fn ) def wrapper ( * args , ** kwargs ) : with ctxt ( * outer_args , ** outer_kwargs ) : return fn ( * args , ** kwargs ) return wrapper return decorator if getattr ( ctxt , "__... | Converts a context manager into a decorator . |
5,722 | def get_error ( self , error ) : error_type = type ( error ) if error . error_type == ET_CLIENT : error_type_name = 'Client' else : error_type_name = 'Server' return { 'type' : error_type_name , 'name' : error_type . __name__ , 'prefix' : getattr ( error_type , '__module__' , '' ) , 'message' : unicode ( error ) , 'par... | A helper function gets standard information from the error . |
5,723 | def validate_quota_change ( self , quota_deltas , raise_exception = False ) : errors = [ ] for name , delta in six . iteritems ( quota_deltas ) : quota = self . quotas . get ( name = name ) if quota . is_exceeded ( delta ) : errors . append ( '%s quota limit: %s, requires %s (%s)\n' % ( quota . name , quota . limit , q... | Get error messages about object and its ancestor quotas that will be exceeded if quota_delta is added . |
5,724 | def get_sum_of_quotas_as_dict ( cls , scopes , quota_names = None , fields = [ 'usage' , 'limit' ] ) : if not scopes : return { } if quota_names is None : quota_names = cls . get_quotas_names ( ) scope_models = set ( [ scope . _meta . model for scope in scopes ] ) if len ( scope_models ) > 1 : raise exceptions . QuotaE... | Return dictionary with sum of all scopes quotas . |
5,725 | def scope_types ( self , request , * args , ** kwargs ) : return response . Response ( utils . get_scope_types_mapping ( ) . keys ( ) ) | Returns a list of scope types acceptable by events filter . |
5,726 | def import_from_file ( self , index , filename ) : with open ( filename , 'r' ) as fp : self . _import_from_struct ( index , json . load ( fp ) ) | Import this instrument s settings from the given file . Will automatically add the instrument s synth and table to the song s synths and tables if needed . |
5,727 | def load_lsdsng ( filename ) : with open ( filename , 'rb' ) as fp : preamble_data = bread . parse ( fp , spec . lsdsng_preamble ) with open ( filename , 'rb' ) as fp : fp . seek ( int ( len ( preamble_data ) / 8 ) ) factory = BlockFactory ( ) while True : block_data = bytearray ( fp . read ( blockutils . BLOCK_SIZE ) ... | Load a Project from a . lsdsng file . |
5,728 | def load_srm ( filename ) : with open ( filename , 'rb' ) as fp : raw_data = fp . read ( ) compressed_data = filepack . compress ( raw_data ) factory = BlockFactory ( ) writer = BlockWriter ( ) writer . write ( compressed_data , factory ) size_in_blocks = len ( factory . blocks ) name = "SRMLOAD" version = 0 return Pro... | Load a Project from an . srm file . |
5,729 | def song ( self ) : if self . _song is None : self . _song = Song ( self . _song_data ) return self . _song | the song associated with the project |
5,730 | def save ( self , filename ) : with open ( filename , 'wb' ) as fp : writer = BlockWriter ( ) factory = BlockFactory ( ) preamble_dummy_bytes = bytearray ( [ 0 ] * 9 ) preamble = bread . parse ( preamble_dummy_bytes , spec . lsdsng_preamble ) preamble . name = self . name preamble . version = self . version preamble_da... | Save a project in . lsdsng format to the target file . |
5,731 | def save_srm ( self , filename ) : with open ( filename , 'wb' ) as fp : raw_data = bread . write ( self . _song_data , spec . song ) fp . write ( raw_data ) | Save a project in . srm format to the target file . |
5,732 | def phase_type ( self , value ) : self . _params . phase_type = value self . _overwrite_lock . disable ( ) | compresses the waveform horizontally ; one of normal resync resync2 |
5,733 | def save ( self , filename , callback = _noop_callback ) : with open ( filename , 'wb' ) as fp : self . _save ( fp , callback ) | Save this file . |
5,734 | def split ( compressed_data , segment_size , block_factory ) : segments = [ ] current_segment_start = 0 index = 0 data_size = len ( compressed_data ) while index < data_size : current_byte = compressed_data [ index ] if index < data_size - 1 : next_byte = compressed_data [ index + 1 ] else : next_byte = None jump_size ... | Splits compressed data into blocks . |
5,735 | def renumber_block_keys ( blocks ) : byte_switch_keys = [ 0 ] block_keys = list ( blocks . keys ( ) ) for block in list ( blocks . values ( ) ) : i = 0 while i < len ( block . data ) - 1 : current_byte = block . data [ i ] next_byte = block . data [ i + 1 ] if current_byte == RLE_BYTE : if next_byte == RLE_BYTE : i += ... | Renumber a block map s indices so that they match the blocks block switch statements . |
5,736 | def merge ( blocks ) : current_block = blocks [ sorted ( blocks . keys ( ) ) [ 0 ] ] compressed_data = [ ] eof = False while not eof : data_size_to_append = None next_block = None i = 0 while i < len ( current_block . data ) - 1 : current_byte = current_block . data [ i ] next_byte = current_block . data [ i + 1 ] if c... | Merge the given blocks into a contiguous block of compressed data . |
5,737 | def pad ( segment , size ) : for i in range ( size - len ( segment ) ) : segment . append ( 0 ) assert len ( segment ) == size | Add zeroes to a segment until it reaches a certain size . |
5,738 | def decompress ( compressed_data ) : raw_data = [ ] index = 0 while index < len ( compressed_data ) : current = compressed_data [ index ] index += 1 if current == RLE_BYTE : directive = compressed_data [ index ] index += 1 if directive == RLE_BYTE : raw_data . append ( RLE_BYTE ) else : count = compressed_data [ index ... | Decompress data that has been compressed by the filepack algorithm . |
5,739 | def compress ( raw_data ) : raw_data = bytearray ( raw_data ) compressed_data = [ ] data_size = len ( raw_data ) index = 0 next_bytes = [ - 1 , - 1 , - 1 ] def is_default_instrument ( index ) : if index + len ( DEFAULT_INSTRUMENT_FILEPACK ) > len ( raw_data ) : return False instr_bytes = raw_data [ index : index + len ... | Compress raw bytes with the filepack algorithm . |
5,740 | def name_without_zeroes ( name ) : first_zero = name . find ( b'\0' ) if first_zero == - 1 : return name else : return str ( name [ : first_zero ] ) | Return a human - readable name without LSDJ s trailing zeroes . |
5,741 | def table ( self ) : if hasattr ( self . data , 'table_on' ) and self . data . table_on : assert_index_sane ( self . data . table , len ( self . song . tables ) ) return self . song . tables [ self . data . table ] | a pylsdj . Table referencing the instrument s table or None if the instrument doesn t have a table |
5,742 | def import_lsdinst ( self , struct_data ) : self . name = struct_data [ 'name' ] self . automate = struct_data [ 'data' ] [ 'automate' ] self . pan = struct_data [ 'data' ] [ 'pan' ] if self . table is not None : self . table . import_lsdinst ( struct_data ) | import from an lsdinst struct |
5,743 | def export_to_file ( self , filename ) : instr_json = self . export_struct ( ) with open ( filename , 'w' ) as fp : json . dump ( instr_json , fp , indent = 2 ) | Export this instrument s settings to a file . |
5,744 | def write_wav ( self , filename ) : wave_output = None try : wave_output = wave . open ( filename , 'w' ) wave_output . setparams ( WAVE_PARAMS ) frames = bytearray ( [ x << 4 for x in self . sample_data ] ) wave_output . writeframes ( frames ) finally : if wave_output is not None : wave_output . close ( ) | Write this sample to a WAV file . |
5,745 | def read_wav ( self , filename ) : wave_input = None try : wave_input = wave . open ( filename , 'r' ) wave_frames = bytearray ( wave_input . readframes ( wave_input . getnframes ( ) ) ) self . sample_data = [ x >> 4 for x in wave_frames ] finally : if wave_input is not None : wave_input . close ( ) | Read sample data for this sample from a WAV file . |
5,746 | def get_device_address ( device ) : if device is None : return None command = [ 'ip' , 'route' , 'list' , 'dev' , device ] ip_routes = subprocess . check_output ( command ) . strip ( ) for line in ip_routes . split ( '\n' ) : seen = '' for a in line . split ( ) : if seen == 'src' : return a seen = a return None | find the local ip address on the given device |
5,747 | def get_default_net_device ( ) : with open ( '/proc/net/route' ) as fh : for line in fh : iface , dest , _ = line . split ( None , 2 ) if dest == '00000000' : return iface return None | Find the device where the default route is . |
5,748 | def add_missing_optional_args_with_value_none ( args , optional_args ) : for name in optional_args : if not name in args . keys ( ) : args [ name ] = None return args | Adds key - value pairs to the passed dictionary so that afterwards the dictionary can be used without needing to check for KeyErrors . |
5,749 | def check_presence_of_mandatory_args ( args , mandatory_args ) : missing_args = [ ] for name in mandatory_args : if name not in args . keys ( ) : missing_args . append ( name ) if len ( missing_args ) > 0 : raise ValueError ( 'Missing mandatory arguments: ' + ', ' . join ( missing_args ) ) else : return True | Checks whether all mandatory arguments are passed . |
5,750 | def monkey_patch_migration_template ( self , app , fixture_path ) : self . _MIGRATION_TEMPLATE = writer . MIGRATION_TEMPLATE module_split = app . module . __name__ . split ( '.' ) if len ( module_split ) == 1 : module_import = "import %s\n" % module_split [ 0 ] else : module_import = "from %s import %s\n" % ( '.' . joi... | Monkey patch the django . db . migrations . writer . MIGRATION_TEMPLATE |
5,751 | def migration_exists ( self , app , fixture_path ) : base_name = os . path . basename ( fixture_path ) for migration_path in glob . glob ( os . path . join ( app . path , 'migrations' , '*.py' ) ) : if base_name in open ( migration_path ) . read ( ) : return True return False | Return true if it looks like a migration already exists . |
5,752 | def create_migration ( self , app , fixture_path ) : self . monkey_patch_migration_template ( app , fixture_path ) out = StringIO ( ) management . call_command ( 'makemigrations' , app . label , empty = True , stdout = out ) self . restore_migration_template ( ) self . stdout . write ( out . getvalue ( ) ) | Create a data migration for app that uses fixture_path . |
5,753 | def instantiate_for_read_and_search ( handle_server_url , reverselookup_username , reverselookup_password , ** config ) : if handle_server_url is None and 'reverselookup_baseuri' not in config . keys ( ) : raise TypeError ( 'You must specify either "handle_server_url" or "reverselookup_baseuri".' + ' Searching not poss... | Initialize client with read access and with search function . |
5,754 | def get_value_from_handle ( self , handle , key , handlerecord_json = None ) : LOGGER . debug ( 'get_value_from_handle...' ) handlerecord_json = self . __get_handle_record_if_necessary ( handle , handlerecord_json ) if handlerecord_json is None : raise HandleNotFoundException ( handle = handle ) list_of_entries = handl... | Retrieve a single value from a single Handle . If several entries with this key exist the method returns the first one . If the handle does not exist the method will raise a HandleNotFoundException . |
5,755 | def delete_handle_value ( self , handle , key ) : LOGGER . debug ( 'delete_handle_value...' ) handlerecord_json = self . retrieve_handle_record_json ( handle ) if handlerecord_json is None : msg = 'Cannot modify unexisting handle' raise HandleNotFoundException ( handle = handle , msg = msg ) list_of_entries = handlerec... | Delete a key - value pair from a handle record . If the key exists more than once all key - value pairs with this key are deleted . |
5,756 | def delete_handle ( self , handle , * other ) : LOGGER . debug ( 'delete_handle...' ) utilhandle . check_handle_syntax ( handle ) if len ( other ) > 0 : message = 'You specified more than one argument. If you wanted' + ' to delete just some values from a handle, please use the' + ' new method "delete_handle_value()".' ... | Delete the handle and its handle record . If the Handle is not found an Exception is raised . |
5,757 | def register_handle ( self , handle , location , checksum = None , additional_URLs = None , overwrite = False , ** extratypes ) : LOGGER . debug ( 'register_handle...' ) if overwrite == False : handlerecord_json = self . retrieve_handle_record_json ( handle ) if handlerecord_json is not None : msg = 'Could not register... | Registers a new Handle with given name . If the handle already exists and overwrite is not set to True the method will throw an exception . |
5,758 | def get_handlerecord_indices_for_key ( self , key , list_of_entries ) : LOGGER . debug ( 'get_handlerecord_indices_for_key...' ) indices = [ ] for entry in list_of_entries : if entry [ 'type' ] == key : indices . append ( entry [ 'index' ] ) return indices | Finds the Handle entry indices of all entries that have a specific type . |
5,759 | def __get_handle_record_if_necessary ( self , handle , handlerecord_json ) : if handlerecord_json is None : handlerecord_json = self . retrieve_handle_record_json ( handle ) else : if handle != handlerecord_json [ 'handle' ] : handlerecord_json = self . retrieve_handle_record_json ( handle ) return handlerecord_json | Returns the handle record if it is None or if its handle is not the same as the specified handle . |
5,760 | def __create_entry ( self , entrytype , data , index , ttl = None ) : if entrytype == 'HS_ADMIN' : op = 'creating HS_ADMIN entry' msg = 'This method can not create HS_ADMIN entries.' raise IllegalOperationException ( operation = op , msg = msg ) entry = { 'index' : index , 'type' : entrytype , 'data' : data } if ttl is... | Create an entry of any type except HS_ADMIN . |
5,761 | def __create_admin_entry ( self , handleowner , permissions , index , handle , ttl = None ) : if handleowner is None : adminindex = '200' prefix = handle . split ( '/' ) [ 0 ] adminhandle = '0.NA/' + prefix else : adminindex , adminhandle = utilhandle . remove_index_from_handle ( handleowner ) data = { 'value' : { 'ind... | Create an entry of type HS_ADMIN . |
5,762 | def auth_access ( self , auth_code ) : data = { 'client_id' : self . client_id , 'client_secret' : self . client_secret , 'grant_type' : 'authorization_code' , 'code' : auth_code , 'redirect_uri' : self . redirect_url } return self . request ( "post" , "access_token" , data = data ) | Verify the first authorization response url code |
5,763 | def check_if_username_exists ( self , username ) : LOGGER . debug ( 'check_if_username_exists...' ) _ , handle = b2handle . utilhandle . remove_index_from_handle ( username ) resp = self . send_handle_get_request ( handle ) resp_content = decoded_response ( resp ) if b2handle . hsresponses . does_handle_exist ( resp ) ... | Check if the username handle exists . |
5,764 | def publish_metric ( self , metric_name , metric_value , epoch_seconds = None ) : if epoch_seconds is None : epoch_seconds = self . _reactor . seconds ( ) self . _client_factory . publish_metric ( metric_name , metric_value , int ( epoch_seconds ) ) | Record a single hit on a given metric . |
5,765 | def register_repeating_metric ( self , metric_name , frequency , getter ) : l = task . LoopingCall ( self . _publish_repeating_metric , metric_name , getter ) repeating_metric_handle = RepeatingMetricHandle ( l , frequency ) self . _repeating_metric_handles . append ( repeating_metric_handle ) if self . running : repea... | Record hits to a metric at a specified interval . |
5,766 | def show ( ) : parent = None current = QtWidgets . QApplication . activeWindow ( ) while current : parent = current current = parent . parent ( ) window = ( _discover_gui ( ) or _show_no_gui ) ( parent ) return window | Try showing the most desirable GUI |
5,767 | def dock ( window ) : if self . _dock : print ( "Deleting existing dock..." ) parent = self . _dock dialog = None stacked_widget = None main_windows = [ ] while parent : if isinstance ( parent , QtWidgets . QDialog ) : dialog = parent if isinstance ( parent , QtWidgets . QStackedWidget ) : stacked_widget = parent if is... | Expecting a window to parent into a Nuke panel that is dockable . |
5,768 | def remove_index_from_handle ( handle_with_index ) : split = handle_with_index . split ( ':' ) if len ( split ) == 2 : split [ 0 ] = int ( split [ 0 ] ) return split elif len ( split ) == 1 : return ( None , handle_with_index ) elif len ( split ) > 2 : raise handleexceptions . HandleSyntaxError ( msg = 'Too many colons... | Returns index and handle separately in a tuple . |
5,769 | def create_authentication_string ( username , password ) : username_utf8 = username . encode ( 'utf-8' ) userpw_utf8 = password . encode ( 'utf-8' ) username_perc = quote ( username_utf8 ) userpw_perc = quote ( userpw_utf8 ) authinfostring = username_perc + ':' + userpw_perc authinfostring_base64 = base64 . b64encode (... | Creates an authentication string from the username and password . |
5,770 | def make_request_log_message ( ** args ) : mandatory_args = [ 'op' , 'handle' , 'url' , 'headers' , 'verify' , 'resp' ] optional_args = [ 'payload' ] util . check_presence_of_mandatory_args ( args , mandatory_args ) util . add_missing_optional_args_with_value_none ( args , optional_args ) space = '\n ' message = '' m... | Creates a string containing all relevant information about a request made to the Handle System for logging purposes . |
5,771 | def find_module ( fdr , fqname , path = None ) : if fqname in fdr . aliases : return Loader ( fqname , fdr . aliases [ fqname ] ) return None | Find a loader for module or package fqname . |
5,772 | def load_module ( ldr , fqname ) : scope = ldr . scope . split ( '.' ) modpath = fqname . split ( '.' ) if scope != modpath [ 0 : len ( scope ) ] : raise AssertionError ( "%s responsible for %s got request for %s" % ( ldr . __class__ . __name__ , ldr . scope , fqname , ) ) if fqname in sys . modules : mod = sys . modul... | Load fqname from under ldr . fspath . |
5,773 | def zthread_fork ( ctx , func , * args , ** kwargs ) : a = ctx . socket ( zmq . PAIR ) a . setsockopt ( zmq . LINGER , 0 ) a . setsockopt ( zmq . RCVHWM , 100 ) a . setsockopt ( zmq . SNDHWM , 100 ) a . setsockopt ( zmq . SNDTIMEO , 5000 ) a . setsockopt ( zmq . RCVTIMEO , 5000 ) b = ctx . socket ( zmq . PAIR ) b . set... | Create an attached thread . An attached thread gets a ctx and a PAIR pipe back to its parent . It must monitor its pipe and exit if the pipe becomes unreadable . Returns pipe or NULL if there was an error . |
5,774 | def _remap ( object , name , value , safe = True ) : if os . getenv ( "QT_TESTING" ) is not None and safe : if hasattr ( object , name ) : raise AttributeError ( "Cannot override existing name: " "%s.%s" % ( object . __name__ , name ) ) if type ( object ) . __name__ != "module" : raise AttributeError ( "%s != 'module':... | Prevent accidental assignment of existing members |
5,775 | def log_instantiation ( LOGGER , classname , args , forbidden , with_date = False ) : if with_date : LOGGER . info ( 'Instantiating ' + classname + ' at ' + datetime . datetime . now ( ) . strftime ( '%Y-%m-%d_%H:%M' ) ) else : LOGGER . info ( 'Instantiating ' + classname ) for argname in args : if args [ argname ] is ... | Log the instantiation of an object to the given logger . |
5,776 | def filter_params ( params ) : if params is not None : new_params = copy . deepcopy ( params ) new_params = dict ( ( k , v ) for k , v in new_params . items ( ) if v is not None ) for key , value in new_params . items ( ) : if isinstance ( value , bool ) : new_params [ key ] = "true" if value else "false" return new_pa... | convert dict value if value is bool type False - > false True - > true |
5,777 | def __set_revlookup_auth_string ( self , username , password ) : auth = b2handle . utilhandle . create_authentication_string ( username , password ) self . __revlookup_auth_string = auth | Creates and sets the authentication string for accessing the reverse lookup servlet . No return the string is set as an attribute to the client instance . |
5,778 | def load_from_JSON ( json_filename ) : try : jsonfilecontent = json . loads ( open ( json_filename , 'r' ) . read ( ) ) except ValueError as exc : raise CredentialsFormatError ( msg = "Invalid JSON syntax: " + str ( exc ) ) instance = PIDClientCredentials ( credentials_filename = json_filename , ** jsonfilecontent ) re... | Create a new instance of a PIDClientCredentials with information read from a local JSON file . |
5,779 | def fixture ( app , fixtures , fixtures_dir = 'fixtures' , raise_does_not_exist = False , reversible = True , models = [ ] ) : fixture_path = os . path . join ( app . __path__ [ 0 ] , fixtures_dir ) if isinstance ( fixtures , string_types ) : fixtures = [ fixtures ] def get_format ( fixture ) : return os . path . split... | Load fixtures using a data migration . |
5,780 | def nonzero ( self ) : return [ i for i in xrange ( self . size ( ) ) if self . test ( i ) ] | Get all non - zero bits |
5,781 | def tohexstring ( self ) : val = self . tostring ( ) st = "{0:0x}" . format ( int ( val , 2 ) ) return st . zfill ( len ( self . bitmap ) * 2 ) | Returns a hexadecimal string |
5,782 | def fromhexstring ( cls , hexstring ) : bitstring = format ( int ( hexstring , 16 ) , "0" + str ( len ( hexstring ) / 4 ) + "b" ) return cls . fromstring ( bitstring ) | Construct BitMap from hex string |
5,783 | def fromstring ( cls , bitstring ) : nbits = len ( bitstring ) bm = cls ( nbits ) for i in xrange ( nbits ) : if bitstring [ - i - 1 ] == '1' : bm . set ( i ) elif bitstring [ - i - 1 ] != '0' : raise Exception ( "Invalid bit string!" ) return bm | Construct BitMap from string |
5,784 | def get_valid_https_verify ( value ) : http_verify_value = value bool_values = { 'false' : False , 'true' : True } if isinstance ( value , bool ) : http_verify_value = value elif ( isinstance ( value , str ) or isinstance ( value , unicode ) ) and value . lower ( ) in bool_values . keys ( ) : http_verify_value = bool_v... | Get a value that can be the boolean representation of a string or a boolean itself and returns it as a boolean . If this is not the case it returns a string . |
5,785 | def setup ( self ) : super ( CleanCSSFilter , self ) . setup ( ) self . root = current_app . config . get ( 'COLLECT_STATIC_ROOT' ) | Initialize filter just before it will be used . |
5,786 | def rebase_opt ( self ) : if not hasattr ( self , '_rebase_opt' ) : out , err = Popen ( [ 'cleancss' , '--version' ] , stdout = PIPE ) . communicate ( ) ver = int ( out [ : out . index ( b'.' ) ] ) self . _rebase_opt = [ '--root' , self . root ] if ver == 3 else [ ] return self . _rebase_opt | Determine which option name to use . |
5,787 | def input ( self , _in , out , ** kw ) : args = [ self . binary or 'cleancss' ] + self . rebase_opt if self . extra_args : args . extend ( self . extra_args ) self . subprocess ( args , out , _in ) | Input filtering . |
5,788 | def output ( self , _in , out , ** kwargs ) : out . write ( 'angular.module("{0}", ["gettext"]).run(' '["gettextCatalog", function (gettextCatalog) {{' . format ( self . catalog_name ) ) out . write ( _in . read ( ) ) out . write ( '}]);' ) | Wrap translation in Angular module . |
5,789 | def input ( self , _in , out , ** kwargs ) : language_code = _re_language_code . search ( _in . read ( ) ) . group ( 'language_code' ) _in . seek ( 0 ) catalog = read_po ( _in ) out . write ( 'gettextCatalog.setStrings("{0}", ' . format ( language_code ) ) out . write ( json . dumps ( { key : value . string for key , v... | Process individual translation file . |
5,790 | def retrieve_info ( self ) : path = urlparse ( self . url ) . path path = path . split ( '/' ) [ 1 : ] sanity_filter = re . compile ( '[\da-z-_]+' , re . IGNORECASE ) self . product = sanity_filter . match ( path [ 0 ] ) . group ( 0 ) self . component = sanity_filter . match ( path [ 1 ] ) . group ( 0 ) self . issue_id... | Query the Github API to retrieve the needed infos . |
5,791 | def disk_cache ( cls , basename , function , * args , method = True , ** kwargs ) : @ utility . disk_cache ( basename , cls . directory ( ) , method = method ) def wrapper ( * args , ** kwargs ) : return function ( * args , ** kwargs ) return wrapper ( * args , ** kwargs ) | Cache the return value in the correct cache directory . Set method to false for static methods . |
5,792 | def download ( cls , url , filename = None ) : return utility . download ( url , cls . directory ( ) , filename ) | Download a file into the correct cache directory . |
5,793 | def directory ( cls , prefix = None ) : prefix = prefix or utility . read_config ( ) . directory name = cls . __name__ . lower ( ) directory = os . path . expanduser ( os . path . join ( prefix , name ) ) utility . ensure_directory ( directory ) return directory | Path that should be used for caching . Different for all subclasses . |
5,794 | def get_last_rconfiguration_id ( topic_id , remoteci_id , db_conn = None ) : db_conn = db_conn or flask . g . db_conn __TABLE = models . JOBS query = sql . select ( [ __TABLE . c . rconfiguration_id ] ) . order_by ( sql . desc ( __TABLE . c . created_at ) ) . where ( sql . and_ ( __TABLE . c . topic_id == topic_id , __... | Get the rconfiguration_id of the last job run by the remoteci . |
5,795 | def get_remoteci_configuration ( topic_id , remoteci_id , db_conn = None ) : db_conn = db_conn or flask . g . db_conn last_rconfiguration_id = get_last_rconfiguration_id ( topic_id , remoteci_id , db_conn = db_conn ) _RCONFIGURATIONS = models . REMOTECIS_RCONFIGURATIONS _J_RCONFIGURATIONS = models . JOIN_REMOTECIS_RCON... | Get a remoteci configuration . This will iterate over each configuration in a round robin manner depending on the last rconfiguration used by the remoteci . |
5,796 | def ignore_whitespace_text_nodes ( cls , wrapped_node ) : for child in wrapped_node . children : if child . is_text and child . value . strip ( ) == '' : child . delete ( ) else : cls . ignore_whitespace_text_nodes ( child ) | Find and delete any text nodes containing nothing but whitespace in in the given node and its descendents . |
5,797 | def verify_existence_and_get ( id , table , name = None , get_id = False ) : where_clause = table . c . id == id if name : where_clause = table . c . name == name if 'state' in table . columns : where_clause = sql . and_ ( table . c . state != 'archived' , where_clause ) query = sql . select ( [ table ] ) . where ( whe... | Verify the existence of a resource in the database and then return it if it exists according to the condition or raise an exception . |
5,798 | def user_topic_ids ( user ) : if user . is_super_admin ( ) or user . is_read_only_user ( ) : query = sql . select ( [ models . TOPICS ] ) else : query = ( sql . select ( [ models . JOINS_TOPICS_TEAMS . c . topic_id ] ) . select_from ( models . JOINS_TOPICS_TEAMS . join ( models . TOPICS , sql . and_ ( models . JOINS_TO... | Retrieve the list of topics IDs a user has access to . |
5,799 | def verify_team_in_topic ( user , topic_id ) : if user . is_super_admin ( ) or user . is_read_only_user ( ) : return if str ( topic_id ) not in user_topic_ids ( user ) : raise dci_exc . Unauthorized ( ) | Verify that the user s team does belong to the given topic . If the user is an admin or read only user then it belongs to all topics . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.